In [1]:
import numpy as np 
import glob
import os
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import cv2
import pickle
%matplotlib inline

Camera Calibration

In [52]:
# First calibrate the camera
CALIBRATION_FOLDER_NAME = "camera_cal"

# read in the images
images_path = glob.glob(os.path.join(CALIBRATION_FOLDER_NAME, "calibration*.jpg"))
print(images_path) 
['camera_cal/calibration10.jpg', 'camera_cal/calibration11.jpg', 'camera_cal/calibration12.jpg', 'camera_cal/calibration13.jpg', 'camera_cal/calibration14.jpg', 'camera_cal/calibration15.jpg', 'camera_cal/calibration16.jpg', 'camera_cal/calibration17.jpg', 'camera_cal/calibration18.jpg', 'camera_cal/calibration19.jpg', 'camera_cal/calibration2.jpg', 'camera_cal/calibration20.jpg', 'camera_cal/calibration21.jpg', 'camera_cal/calibration3.jpg', 'camera_cal/calibration4.jpg', 'camera_cal/calibration5.jpg', 'camera_cal/calibration6.jpg', 'camera_cal/calibration7.jpg', 'camera_cal/calibration8.jpg', 'camera_cal/calibration9.jpg']
In [53]:
# Look at an image to see how many corners are in the chessboard 
test_image = mpimg.imread(images_path[np.random.randint(len(images_path))])
plt.imshow(test_image)
Out[53]:
<matplotlib.image.AxesImage at 0x12a65a278>
In [54]:
# Define the number of corners
nx = 9
ny = 6

# define the object points, these are the points that are on the chessboard
objp = np.zeros((ny * nx, 3), np.float32)
objp[:,:2] = np.mgrid[0:nx, 0:ny].T.reshape(-2, 2) # x, y coordinates
In [55]:
# Now define the corners on the images
# The goal here is to map the imgpoints (which are the coordinates on the image) to our 
# predefined world coordinates

imgpoints = [] # these are the 3D points in the real world space
objpoints = [] # these are the 2D points in the image plane
chessboards_with_drawn_corners = []

for image_path in images_path:
    # if matplotlib is used
    image = mpimg.imread(image_path)
    gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
    
    
    # if cv2 is used although we don't use it to plot as it can't plot inline
#     image = cv2.imread(image_path)
#     print(image)
#     break
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) # note that we use BGR2GRAY 
    
    ret, corners = cv2.findChessboardCorners(gray, (nx, ny), None)
    
    if ret == True:
        imgpoints.append(corners)
        objpoints.append(objp) 
        chessboard_with_drawn_corners = cv2.drawChessboardCorners(image, (nx, ny), corners, ret)
        chessboards_with_drawn_corners.append(chessboard_with_drawn_corners) 
        
#         cv2.imshow('img', chessboard_with_drawn_corners)
#         cv2.waitKey(500)
In [56]:
# Plot the chessboard with the corners drawn
plt.imshow(chessboards_with_drawn_corners[np.random.randint(len(chessboards_with_drawn_corners))])
Out[56]:
<matplotlib.image.AxesImage at 0x12a041d30>
In [57]:
# Finally calibrate the camera
test_image_path = os.path.join(CALIBRATION_FOLDER_NAME, "test_for_calibration.jpg")
test_image = plt.imread(test_image_path)
test_image_size = (test_image.shape[1], test_image.shape[0])
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, test_image_size, None, None)
In [58]:
# Plot test image before applying undistorting
plt.imshow(test_image)
Out[58]:
<matplotlib.image.AxesImage at 0x12d1d0668>
In [59]:
# Plot test image after applying undistortion
dest = cv2.undistort(test_image, mtx, dist, None, mtx)
plt.imshow(dest)
Out[59]:
<matplotlib.image.AxesImage at 0x12d0febe0>
In [60]:
# Now lets aggregate these into a function

def calibrate_camera(calibration_image_paths, test_image_path, nx, ny):
    """
    The purpose of this function is to calibrate the camera, it returns the coordinate matrix
    and the distortion matrix
    :::params calibration_image_paths : the path of where the images are stored
              test_image_path : the path of the test image
              nx : number of corners in the x direction
              ny : number of corners in the y direction
              
    """
    # define the object points, these are the points that are on the chessboard
    objp = np.zeros((ny * nx, 3), np.float32)
    objp[:,:2] = np.mgrid[0:nx, 0:ny].T.reshape(-2, 2) # x, y coordinates
    
    # define the image points, these are the points that are in the real world
    # The goal here is to map the imgpoints (which are the coordinates on the image) to our 
    # predefined world coordinates

    imgpoints = [] # these are the 3D points in the real world space
    objpoints = [] # these are the 2D points in the image plane
    chessboards_with_drawn_corners = []

    for image_path in calibration_image_paths:
        
        # if matplotlib is used
        image = mpimg.imread(image_path)
        gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)


        # if cv2 is used although we don't use it to plot as it can't plot inline
    #     image = cv2.imread(image_path)
    #     print(image)
    #     break
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) # note that we use BGR2GRAY 

        ret, corners = cv2.findChessboardCorners(gray, (nx, ny), None)

        if ret == True:
            imgpoints.append(corners)
            objpoints.append(objp) 
            chessboard_with_drawn_corners = cv2.drawChessboardCorners(image, (nx, ny), corners, ret)
            chessboards_with_drawn_corners.append(chessboard_with_drawn_corners)
        
    ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, test_image_size, None, None)
    return mtx, dist
In [61]:
# test it
CALIBRATION_FOLDER_NAME = "camera_cal"

# image paths for calibration images
calibration_image_paths = glob.glob(os.path.join(CALIBRATION_FOLDER_NAME, "calibration*.jpg"))

# image path for test image
test_image_path = os.path.join(CALIBRATION_FOLDER_NAME, "test_for_calibration.jpg")

# For the parameters for the calibration
mtx, dist = calibrate_camera(calibration_image_paths, test_image_path, 9, 6)
In [62]:
# plot the undistorted image
test_image = mpimg.imread(test_image_path)
dest = cv2.undistort(test_image, mtx, dist, None, mtx)
In [63]:
f, (ax1, ax2) = plt.subplots(1, 2, figsize = (30, 15))
ax1.imshow(test_image)
ax1.set_title('Original Image', fontsize=50)
ax2.imshow(dest)
ax2.set_title('Undistorted Image', fontsize=50)
Out[63]:
<matplotlib.text.Text at 0x1218415f8>
In [64]:
# Test the calibrated camera on a road image
LANE_LINES_FOLDER_NAME = "test_images"
lane_line_image_paths = glob.glob(os.path.join(LANE_LINES_FOLDER_NAME, '*.jpg'))
lane_line_images = [mpimg.imread(lane_line_image_path) for lane_line_image_path in lane_line_image_paths]
In [65]:
f, (ax1, ax2) = plt.subplots(1, 2, figsize = (30, 15))
# Plot a sample image
sample_lane_image = lane_line_images[6]
ax1.set_title('Original Image', fontsize=50)
ax1.imshow(sample_lane_image)

# and undistort the image
dest = cv2.undistort(sample_lane_image, mtx, dist, None, mtx)
ax2.imshow(dest)
ax2.set_title('Undistorted Image', fontsize=50)
Out[65]:
<matplotlib.text.Text at 0x12cb08198>

Doing the perspective transform

In [66]:
# Define the size of the original image and the size of the image after
# the perspective transform

ORIGINAL_SIZE = 1280, 720
UNWARPED_SIZE = 1280, 720

def get_src_dst():    
    
    # images used to find the vanishing point
    straight_images = ["test_images/straight_lines1.jpg", "test_images/straight_lines2.jpg"]
    
    # define a triangle that represents the region of interest
    roi_points = np.array([[0, ORIGINAL_SIZE[1] - 50], \
                          [ORIGINAL_SIZE[0], ORIGINAL_SIZE[1] - 50], \
                          [ORIGINAL_SIZE[0]//2, ORIGINAL_SIZE[1]//2 + 50]], dtype = np.int32)
    
    # fill these points in
    roi = np.zeros((ORIGINAL_SIZE[1], ORIGINAL_SIZE[0]), dtype=np.uint8)
    cv2.fillPoly(roi, [roi_points], 1)

    # Define the matrix that are needed to calculate the vanishing point 
    Lhs = np.zeros((2,2), dtype = np.float32)
    Rhs = np.zeros((2,1), dtype = np.float32)

    # Find the straight lines in the image that correspond to the vanishing point
    for img_path in straight_images:
        img = mpimg.imread(img_path)
        img = cv2.undistort(img, mtx, dist)
        img_hls = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)
        edges = cv2.Canny(img_hls[:, :, 1], 100, 200)
        lines = cv2.HoughLinesP(edges * roi, 0.5, np.pi/180, 20, None, 180, 120)
        for line in lines:
            for x1, y1, x2, y2 in line:
                normal = np.array([[-(y2-y1)], [x2-x1]], dtype = np.float32)
    #             print(normal)
    #             print(np.sqrt(np.sum(np.square(normal))))
                normal /= np.linalg.norm(normal)
    #             print(normal)
                point = np.array([[x1], [y1]], dtype = np.float32)
                outer = np.matmul(normal, normal.T)
    #             print(outer)
                Lhs += outer
                Rhs += np.matmul(outer, point)
                cv2.line(img, (x1, y1), (x2, y2), (255, 0, 0), thickness = 2)

    # calculate the vanishing point
    vanishing_point = np.matmul(np.linalg.inv(Lhs), Rhs)
#     print(vanishing_point)

    # Find the source and destination points
    top = vanishing_point[1] + 60
    bottom = ORIGINAL_SIZE[1] - 35
    width = 530

    def on_line(p1, p2, ycoord):
        return[p1[0] + (p2[0] - p1[0])/float(p2[1] - p1[1]) * (ycoord-p1[1]), ycoord]

    p1 = [vanishing_point[0] - width/2, top]
    p2 = [vanishing_point[0] + width/2, top]
    p3 = on_line(p2, vanishing_point, bottom)
    p4 = on_line(p1, vanishing_point, bottom)


    src = np.array([p1,p2,p3,p4], dtype=np.float32)

    dst = np.array([[0, 0], [UNWARPED_SIZE[0], 0],
                           [UNWARPED_SIZE[0], UNWARPED_SIZE[1]],
                           [0, UNWARPED_SIZE[1]]], dtype=np.float32)
    return(src, dst)
print(get_src_dst())
(array([[  374.52423096,   479.32043457],
       [  904.52423096,   479.32043457],
       [ 1812.94226074,   685.        ],
       [ -533.89379883,   685.        ]], dtype=float32), array([[    0.,     0.],
       [ 1280.,     0.],
       [ 1280.,   720.],
       [    0.,   720.]], dtype=float32))
In [67]:
# Helper function for the transform
def warper(image, src, dst):
    M = cv2.getPerspectiveTransform(src, dst)
    image_size = (image.shape[1], image.shape[0])
    warped = cv2.warpPerspective(image, M, image_size, flags=cv2.INTER_LINEAR) 
    return(warped)

# Plot the points where the perspective transform is to take place
def plot_perspective_transform_points(image):
    src, dst = get_src_dst()
    plt.imshow(image)
    plt.title("Points of the persepctive transform")
    plt.plot(src[0][0], src[0][1], 'o')
    plt.plot(src[1][0], src[1][1], 'o')
    plt.plot(src[2][0], src[2][1], 'o')
    plt.plot(src[3][0], src[3][1], 'o')
    plt.show()
In [68]:
# Plot the source points on the original image
plot_perspective_transform_points(dest)
In [69]:
# Plot the transformed image
src, dst = get_src_dst()
warped_lane_line_image = warper(dest, src, dst)
plt.title("Perspective transformed image")
plt.imshow(warped_lane_line_image)
Out[69]:
<matplotlib.image.AxesImage at 0x12c9e7160>

Extracting the lane lines

Experiment with finding the most useful helpful channels

In [70]:
# Create an image wrapper class for easier conversion between color spaces
RGB_COLOR_SPACE = "RGB"
HLS_COLOR_SPACE = "HLS"
LAB_COLOR_SPACE = "LAB"
SINGLE_COLOR_CHANNEL = "Single"

from enum import Enum
class ColorSpace(Enum): 
    RGB = RGB_COLOR_SPACE
    HLS = HLS_COLOR_SPACE
    LAB = LAB_COLOR_SPACE 
    
class ImageWrapper:
    """
    This class is a wrapper class that allows for easier conversion between
    color spaces
    """
    def __init__(self, image, color_space):
        self.image = image
        self.width = image.shape[1]
        self.height = image.shape[0]
        self.color_space = color_space
        
    def _set_color_space(self, new_color_space):
        if self.color_space == new_color_space:
            pass
        else:
            if new_color_space == ColorSpace.RGB:
                if self.color_space == ColorSpace.HLS:
                    self.image = cv2.cvtColor(self.image, cv2.COLOR_HLS2RGB)
                elif self.color_space == ColorSpace.LAB:
                    self.image = cv2.cvtColor(self.image, cv2.COLOR_LAB2RGB)     
                self.color_space = new_color_space
                
            elif new_color_space == ColorSpace.HLS:
                if self.color_space == ColorSpace.RGB:
                    self.image = cv2.cvtColor(self.image, cv2.COLOR_RGB2HLS)
                elif self.color_space == ColorSpace.LAB:
                    self.image = cv2.cvtColor(self.image, cv2.COLOR_LAB2RGB)
                    self.image = cv2.cvtColor(self.image, cv2.COLOR_RGB2HLS)     
                self.color_space = new_color_space
                
            elif new_color_space == ColorSpace.LAB:
                if self.color_space == ColorSpace.RGB:
                    self.image = cv2.cvtColor(self.image, cv2.COLOR_RGB2LAB)
                elif self.color_space == ColorSpace.HLS:
                    self.image = cv2.cvtColor(self.image, cv2.COLOR_HLS2RGB)
                    self.image = cv2.cvtColor(self.image, cv2.COLOR_RGB2LAB)
                self.color_space = new_color_space
            
            
    def get_RGB(self):
        self._set_color_space(ColorSpace.RGB)
        return(self.image)
    def get_R(self):
        self._set_color_space(ColorSpace.RGB)
        return(self.image[:,:,0])
    def get_G(self):
        self._set_color_space(ColorSpace.RGB)
        return(self.image[:,:,1])
    def get_B(self):
        self._set_color_space(ColorSpace.RGB)
        return(self.image[:,:,2])
   
    def get_HLS(self):
        self._set_color_space(ColorSpace.HLS)
        return(self.image)
    def get_H(self):
        self._set_color_space(ColorSpace.HLS)
        return(self.image[:,:,0])
    def get_L(self):
        self._set_color_space(ColorSpace.HLS)
        return(self.image[:,:,1])
    def get_S(self):
        self._set_color_space(ColorSpace.HLS)
        return(self.image[:,:,2]) 
    
    def get_LAB(self):
        self._set_color_space(ColorSpace.LAB)
        return(self.image)
    def get_l(self):
        self._set_color_space(ColorSpace.LAB)
        return(self.image[:,:,0])
    def get_a(self):
        self._set_color_space(ColorSpace.LAB)
        return(self.image[:,:,1])
    def get_b(self):
        self._set_color_space(ColorSpace.LAB)
        return(self.image[:,:,2]) 
    
    def get_gray(self):
        return(sel)
In [71]:
from matplotlib.colors import hsv_to_rgb

def plot_all_color_channels(image):
    """
    The function simply plots all the different kinds of color channels so that 
    it is easier to visual which color channels are more useful
    """
    f, ((ax1, ax2, ax3), (ax4, ax5, ax6), (ax7, ax8, ax9), (ax10, ax11, ax12)) = plt.subplots(4, 3, figsize = (30, 15))
    f.tight_layout()
    wspace = 0.25   # the amount of width reserved for blank space between subplots
    hspace = 0.25   # the amount of height reserved for white space between subplots
    f.subplots_adjust(wspace = wspace, hspace = hspace)
    font = {'size' : 30}
    ax1.imshow(image.get_R(), cmap = 'gray')
    ax1.set_title("R", fontdict=font)
    ax2.imshow(image.get_G(), cmap = 'gray')
    ax2.set_title("G", fontdict=font)
    ax3.imshow(image.get_B(), cmap = 'gray')
    ax3.set_title("B", fontdict = font)
    ax4.imshow(image.get_H(), cmap = 'gray')
    ax4.set_title("H", fontdict = font)
    ax5.imshow(image.get_L(), cmap = 'gray')
    ax5.set_title("L", fontdict = font)
    ax6.imshow(image.get_S(), cmap = 'gray')
    ax6.set_title("S", fontdict = font)
    
    ax7.imshow(image.get_l(), cmap = 'gray')
    ax7.set_title("L", fontdict = font)
    ax8.imshow(image.get_a(), cmap = 'gray')
    ax8.set_title("A", fontdict = font)
    ax9.imshow(image.get_b(), cmap = 'gray')
    ax9.set_title("B", fontdict = font)
    
    ax10.imshow(image.get_RGB())
    ax10.set_title("RGB", fontdict = font)
    ax11.imshow(hsv_to_rgb(image.get_HLS()))
    ax11.set_title("HLS", fontdict = font)
    ax12.imshow(image.get_LAB())
    ax12.set_title("LAB", fontdict = font)
In [72]:
# First undistort the lane image
lane_line_images = [cv2.undistort(image, mtx, dist, None, mtx) for image in lane_line_images]
                    
# Now wrap them in the custom class created above
wrapped_lane_line_images = [ImageWrapper(image, ColorSpace.RGB) for image in lane_line_images]
In [73]:
# Plot the images
sample_image = wrapped_lane_line_images[6]
plot_all_color_channels(sample_image)

It seems like the most useful channel are the S, and B channels to detect yellow lane lines which are in the shadow.

The HLS's L and R, G channels all do a good job at detect the white lane lines, even in the shadows

Experiment with various methods to find lane lines

In [74]:
# Define the red channel for the sample image
sample_image_R = sample_image.get_R()
In [75]:
# Define some generic helper functions and constants
UINT_8_MAX = 255

def scale_image(image, max_val):
    return((image/np.max(image) * max_val))

def binary_mask(image, threshold):
    """
    Takes in an image, and scales it to 0-255, then creates a binary image 
    where the active pixels are set according to the threshold
    
    :::params image: a (length x width) array (only 1 color channel)
              threshold: the numbers which determine the active pixels
    """
    # Get the absolute value of the pixels
    image = np.abs(image)

    # Scale to 0-255
    image = scale_image(image, UINT_8_MAX)
    binary_output = np.zeros_like(image)
    binary_output[(image >= threshold[0]) & (image <= threshold[1])] = 1
    return(binary_output.astype(np.uint8))

# Define a sobel helper function
def sobel_calc(image, kernel_size):
    """
    This function takes an image of a single color channel, and returns a 2-tuple of images
    with the first element of the tuple being the Sobel operator on the x axis, and the second
    element being the Sobel operator on the y axis
    """
    sobel_x = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize = kernel_size) 
    sobel_y = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize = kernel_size)
    return((sobel_x), (sobel_y))
In [76]:
def axis_threshold(image, axis, sobel_kernel, sobel_threshold):
    """
    Takes in an image and returns a binary image dependent on the mag_threshold
    :::params image: a (length x width) array (only 1 color channel)
              axis: the direction to apply the sobel operator, 0 is x, 1 is y
              sobel_kernel: the size of the sobel kernel
              sobel_threshold: the max and min sobel values which determines which pixels are active
    """
    image_sobel = sobel_calc(image, sobel_kernel)
    sobel_x = image_sobel[0]
    sobel_y = image_sobel[1]
    if (axis == 0):
        binary_output = binary_mask(sobel_x, sobel_threshold)
    else:
        binary_output = binary_mask(sobel_y, sobel_threshold)
    
    return(binary_output)
In [77]:
# Example use:
axis_sobel_kernel_size = 3
ax_threshold = (30, 255)
sobel_x_R = axis_threshold(sample_image_R, 0, axis_sobel_kernel_size, ax_threshold)
sobel_y_R = axis_threshold(sample_image_R, 1, axis_sobel_kernel_size, ax_threshold)
In [78]:
# Plot the result
f, (ax1, ax2) = plt.subplots(1, 2)
ax1.imshow(sobel_x_R, cmap = 'gray')
ax2.imshow(sobel_y_R, cmap = 'gray')
Out[78]:
<matplotlib.image.AxesImage at 0x125bf4c50>
In [79]:
def magnitude_threshold(image, sobel_kernel, mag_threshold):
    """
    Takes in an image and returns a binary image dependent on the mag_threshold
    :::params image: a (length x width) array (only 1 color channel)
              sobel_kernel: the size of the sobel kernel
              mag_threshold: the max and min magnitudes which determines which pixels are active
    """
    # Get the x, y sobels
    image_sobel = sobel_calc(image, sobel_kernel)
    sobel_x = image_sobel[0]
    sobel_y = image_sobel[1]
    
    # Find the magnitude 
    mag = np.sqrt(np.square(sobel_x) + np.square(sobel_y))
    
    # Get a binary mask of the magnitude
    binary_output = binary_mask(mag, mag_threshold)
    
    return(binary_output)
In [80]:
mag_threshold = (30, 255)
mag_sobel_kernel_size = 3
mag_R = magnitude_threshold(sample_image_R, mag_sobel_kernel_size, mag_threshold)
plt.imshow(mag_R, cmap = 'gray')
Out[80]:
<matplotlib.image.AxesImage at 0x125c831d0>
In [81]:
def angle_threshold(image, sobel_kernel, angle_threshold):
    """
    Takes in an image and returns a binary image dependent on the mag_threshold
    :::params image: a (length x width) array (only 1 color channel)
              sobel_kernel: the size of the sobel kernel
              angle_threshold: the max and min angles which determines which pixels are active
    """
    # Apply sobel operator on the image
    image_sobel = sobel_calc(image, sobel_kernel)
    sobel_x = image_sobel[0]
    sobel_y = image_sobel[1]
    
    # 3) Take the absolute value of the x and y gradients
    abs_sobel_x = np.absolute(sobel_x)
    abs_sobel_y = np.absolute(sobel_y)
    
    
    # 4) Use np.arctan2(abs_sobely, abs_sobelx) to calculate the direction of the gradient 
    angle = np.arctan2(abs_sobel_y, abs_sobel_x)
    
    # 5) Create a binary mask where direction thresholds are met
    binary_output = np.zeros_like(angle)
    binary_output[(angle >= angle_threshold[0]) & (angle <= angle_threshold[1])] = 1
    return(binary_output.astype(np.uint8))
In [82]:
# Test it
ang_threshold = (0.7, 1.3)# here pi/2 = 90 degrees so 90 degrees ~= 1.7
ang_sobel_kernel_size = 11
angle_R = angle_threshold(sample_image_R, ang_sobel_kernel_size, ang_threshold)
In [83]:
plt.imshow(angle_R, cmap = 'gray')
Out[83]:
<matplotlib.image.AxesImage at 0x125ced668>

Using these threshold functions plot the results of the:

  • 1) S,and B channels to detect yellow lane lines which are in the shadow.
  • 2) HLS's L and R, G channels all do a good job at detect the white lane lines, even in the shadows
In [84]:
# Helper function to get the useful color channels 
def get_color_channels(image):
    R = image.get_R()
    G = image.get_G()
    S = image.get_S()
    l = image.get_l()
    b = image.get_b()
    return([R, G, S, l, b])

import matplotlib.gridspec as gridspec

def plot_thresholds(images, tophat = False):
    """
    This function plots the data using the output of get_data_in_bin
    
    params data: this is a list of data frame rows, one from each bin
    
    """
    
    # Define spacing between the plots
    fig = plt.figure(figsize=(30, 15))
    gs = gridspec.GridSpec(1,5)
    color_channels = ["R", "G", "S", "l", "b"]
   
    # loop through the 
    for i in range(len(images)):
        ax = fig.add_subplot(gs[i])
        
        # If we have a row from the data frame, then we need to extract 
        # the raw data 
        if not tophat:
            ax.imshow(images[i], cmap = 'gray')
        else:
            ax.imshow(images[i])
        ax.set_title(color_channels[i], fontdict = {'size' :30})
    gs.tight_layout(fig)
    plt.show()
In [85]:
# Define the thresholds to be used`
sobel_threshold = (20, 255)
mag_threshold = (20, 255)
ang_threshold = (0.7, 1.2)

# Set the kernel sizes
axis_s_size = 5
angle_s_size = 5
mag_s_size = 5
In [86]:
# Use all the thresholding techniques above on all the color channels
all_channels = get_color_channels(sample_image)
axis_x_thresholded_images = [axis_threshold(image, 0, sobel_kernel = axis_s_size, \
                                            sobel_threshold = sobel_threshold) for image in all_channels]
axis_y_thresholded_images = [axis_threshold(image, 1, sobel_kernel = axis_s_size, \
                                            sobel_threshold = sobel_threshold) for image in all_channels]
magnitude_thresholded_images = [magnitude_threshold(image, sobel_kernel = mag_s_size, \
                                                    mag_threshold = mag_threshold) for image in all_channels]
angle_thresholded_images = [angle_threshold(image, sobel_kernel = angle_s_size, \
                                            angle_threshold = ang_threshold) for image in all_channels]
In [87]:
plot_thresholds(all_channels)
In [88]:
plot_thresholds(axis_x_thresholded_images)
In [89]:
plot_thresholds(axis_y_thresholded_images)
In [90]:
plot_thresholds(magnitude_thresholded_images)
In [91]:
plot_thresholds(angle_thresholded_images)
In [92]:
useful_images = [axis_x_thresholded_images[0], \
                 axis_x_thresholded_images[1], \
                 axis_x_thresholded_images[3], \
                 axis_x_thresholded_images[4], \
                 axis_y_thresholded_images[4],\
                 magnitude_thresholded_images[4]] 
In [93]:
stacked = np.zeros_like(useful_images[0])
for image in useful_images:
    stacked[(image == 1)] += 1
In [94]:
plt.imshow(stacked, cmap = 'gray')
plt.title("Stacked image", fontsize = 30)
Out[94]:
<matplotlib.text.Text at 0x138832748>
In [95]:
non_tophat_binary_image = np.zeros_like(stacked)
non_tophat_binary_image[stacked >= (len(useful_images)/2)] = 1
plt.imshow(non_tophat_binary_image, cmap = 'gray')
plt.title("Thresholded stacked image")
Out[95]:
<matplotlib.text.Text at 0x133c1b828>
  • Seems like the LAB's b channel and HLS's s channel are pretty good for detecting the yellow lane lines.
  • The HLS's s channel also seems to do a good job in detecting the white lane lines on a grey surface

Use a tophat and erode and see if it makes the lane lines clearer

In [96]:
# Define the morphological transforms
def erode(image, kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))):
    return(cv2.morphologyEx(image, cv2.MORPH_ERODE, kernel))

def tophat(image, kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))):
    return(cv2.morphologyEx(image, cv2.MORPH_TOPHAT, kernel))

def close(image, kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))):
    return(cv2.morphologyEx(image, cv2.MORPH_CLOSE, kernel))

def median_filter(image, filter_size):
    return(cv2.medianBlur(image, filter_size))

def adaptive_transform(image, blocksize, C):
    return(cv2.adaptiveThreshold(image, 1, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, \
                         cv2.THRESH_BINARY, blocksize, C))
In [97]:
# Seems a bit noist on the edge 
close_kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (9,9))
non_tophat_binary_image = close(non_tophat_binary_image, close_kernel)
In [98]:
plt.imshow(non_tophat_binary_image, cmap = 'gray')
Out[98]:
<matplotlib.image.AxesImage at 0x1306e89e8>
In [99]:
non_tophat_binary_image = erode(non_tophat_binary_image)
In [100]:
plt.imshow(non_tophat_binary_image, cmap = 'gray')
Out[100]:
<matplotlib.image.AxesImage at 0x12c9de5f8>
In [101]:
def get_non_tophat_binary_image(wrapped_image):
    
    # Define the thresholds to be used
    sobel_threshold = (20, 255)
    mag_threshold = (20, 255)
    ang_threshold = (0.7, 1.2)

    # Set the kernel sizes
    axis_s_size = 5
    angle_s_size = 5
    mag_s_size = 5
    smoothing_k_size = 5
        
    # Do the thresholding for the images
    all_channels = get_color_channels(wrapped_image)
    axis_x_thresholded_images = [axis_threshold(image, 0, sobel_kernel = axis_s_size, \
                                                sobel_threshold = sobel_threshold) for image in all_channels]
    axis_y_thresholded_images = [axis_threshold(image, 1, sobel_kernel = axis_s_size, \
                                                sobel_threshold = sobel_threshold) for image in all_channels]
    magnitude_thresholded_images = [magnitude_threshold(image, sobel_kernel = mag_s_size, \
                                                        mag_threshold = mag_threshold) for image in all_channels]
    angle_thresholded_images = [angle_threshold(image, sobel_kernel = angle_s_size, \
                                                angle_threshold = ang_threshold) for image in all_channels]
    
    
    # select the useful images
    useful_images = [axis_x_thresholded_images[0], \
                 axis_x_thresholded_images[1], \
                 axis_x_thresholded_images[3], \
                 axis_x_thresholded_images[4], \
                 axis_y_thresholded_images[4],\
                 magnitude_thresholded_images[4]] 

    
    stacked = np.zeros_like(useful_images[0])
#     plt.imshow(stacked, cmap = 'gray')
    
    for image in useful_images:
        stacked[(image == 1)] += 1
        
    non_tophat_binary_image = np.zeros_like(stacked)
    non_tophat_binary_image[stacked >= (len(useful_images)/2)] = 1
    
    close_kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (9, 9))
    non_tophat_binary_image = erode(close(non_tophat_binary_image, close_kernel))

    return(non_tophat_binary_image.astype(np.uint8))  
In [102]:
non_tophat_binary_image = get_non_tophat_binary_image(sample_image)
plt.imshow(non_tophat_binary_image, cmap = 'gray')
Out[102]:
<matplotlib.image.AxesImage at 0x130677d30>

Try using the tophat transform

In [103]:
# Tophat transform all the images
tophat_kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5))
all_channels_tophat = [tophat(image, tophat_kernel) for image in all_channels]
axis_x_thresholded_images = [axis_threshold(image, 0, sobel_kernel = axis_s_size, sobel_threshold = sobel_threshold) for image in all_channels_tophat]
axis_y_thresholded_images = [axis_threshold(image, 1, sobel_kernel = axis_s_size, sobel_threshold = sobel_threshold) for image in all_channels_tophat]
magnitude_thresholded_images = [magnitude_threshold(image, sobel_kernel = mag_s_size, mag_threshold = mag_threshold) for image in all_channels_tophat]
angle_thresholded_images = [angle_threshold(image, sobel_kernel = angle_s_size, angle_threshold = ang_threshold) for image in all_channels_tophat]
In [104]:
plot_thresholds(all_channels_tophat, tophat = True)
In [105]:
plot_thresholds(axis_x_thresholded_images)
In [106]:
plot_thresholds(axis_y_thresholded_images)
In [107]:
plot_thresholds(magnitude_thresholded_images)
In [108]:
plot_thresholds(angle_thresholded_images)
  • Use HLS's L channel, and the LAB's L and B channel to try to find the lane lines
  • But first try to get rid of the horizontal noise by using the axis_threshold function
In [109]:
def stacked_tophat(wrapped_image, kernel):
    L_channel = wrapped_image.get_L()
    l_channel = wrapped_image.get_l() 
    b_channel = wrapped_image.get_b()
#     plt.imshow(tophat(b_channel, kernel))
#     ang_threshold = (0.7, 1.2)
#     angle_sobel_size = 5
#     median_filter_size = 11
#     angle_thresholded_b_channel = median_filter(angle_threshold(b_channel, angle_sobel_size, \
#                                                   ang_threshold), median_filter_size)
    tophat_L = scale_image(tophat(L_channel, kernel), 255)
    tophat_l = scale_image(tophat(l_channel, kernel), 255)
    tophat_b = scale_image(tophat(b_channel, kernel), 255)
# #     angle_thresholded_b_channel = scale_image(angle_thresholded_b_channel, 255)
#     # Note here that we add tophat_b twice as only the b channel detects the yellow lane lines
    stacked_tophat = scale_image(tophat_L + tophat_l + \
                                 tophat_b, \
                                 255).astype(np.uint8)
    return(stacked_tophat)
# stacked_tophat_kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (19, 19))
# stacked_tophat(sample_image, stacked_tophat_kernel)
In [110]:
# First stack the tophat images
stacked_tophat_kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (13, 13))
stacked_tophat_image = stacked_tophat(sample_image, stacked_tophat_kernel)

# Remove the x noise
sobel_kernel = 5
sobel_threshold = (10, 80)
stacked_tophat_image_x_removed = axis_threshold(stacked_tophat_image, 0, sobel_kernel, sobel_threshold)

# Remove the salt and pepper noise
filter_size = 5
stacked_tophat_image_x_sp_removed = median_filter(stacked_tophat_image_x_removed, filter_size)

# Morphological close
close_kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (9, 9))
close_stacked_tophat_image_x_sp_removed = close(stacked_tophat_image_x_sp_removed, close_kernel)

# Plot the images in each stage
f, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, figsize = (30, 15))
ax1.imshow(stacked_tophat_image, cmap = 'gray')
ax2.imshow(stacked_tophat_image_x_removed)
ax3.imshow(stacked_tophat_image_x_sp_removed)
ax4.imshow(close_stacked_tophat_image_x_sp_removed)
# plt.imshow(close(axis_threshold(stacked_tophat_image, 0, sobel_kernel, sobel_threshold), close_kernel))
# adapt_stacked_tophat_image = cv2.adaptiveThreshold(stacked_tophat_image, 1, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 13, -9) 
# plt.imshow(adapt_stacked_tophat_image)
Out[110]:
<matplotlib.image.AxesImage at 0x1259ffc88>
In [111]:
def get_tophat_binary_image(wrapped_image):
    stacked_tophat_kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (13, 13))
    stacked_tophat_image = stacked_tophat(wrapped_image, stacked_tophat_kernel)

    # Remove the x noise
    sobel_kernel = 5
    sobel_threshold = (10, 80)
    stacked_tophat_image_x_removed = axis_threshold(stacked_tophat_image, 0, sobel_kernel, sobel_threshold)

    # Remove the salt and pepper noise
    filter_size = 5
    stacked_tophat_image_x_sp_removed = median_filter(stacked_tophat_image_x_removed, filter_size)

    # Morphological close
    close_kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (9, 9))
    close_stacked_tophat_image_x_sp_removed = close(stacked_tophat_image_x_sp_removed, close_kernel)
    return(close_stacked_tophat_image_x_sp_removed)
In [112]:
tophat_binary_image = get_tophat_binary_image(sample_image)
plt.imshow(tophat_binary_image, cmap = 'gray')
Out[112]:
<matplotlib.image.AxesImage at 0x13d53cac8>
In [113]:
# First do the persective transform
non_tophat_binary_warped = warper(non_tophat_binary_image, src, dst)
tophat_binary_warped = warper(tophat_binary_image, src, dst)
f, (ax1, ax2) = plt.subplots(1, 2, figsize = (30, 15))
ax1.imshow(non_tophat_binary_warped, cmap = 'gray')
ax2.imshow(tophat_binary_warped, cmap = 'gray')
Out[113]:
<matplotlib.image.AxesImage at 0x125cc5358>

Take a look at the two kinds of transforms for all the images

In [114]:
non_tophat_binary_warped_images = [warper(get_non_tophat_binary_image(image), src, dst) \
                                   for image in wrapped_lane_line_images]
tophat_binary_warped_images = [warper(get_tophat_binary_image(image), src, dst) \
                        for image in wrapped_lane_line_images]
original_images = [cv2.cvtColor(image.get_RGB(),cv2.COLOR_RGB2GRAY) for image in wrapped_lane_line_images]
In [115]:
fig = plt.figure(figsize = (30, 15))
gs = gridspec.GridSpec(8, 3)
for i, images in enumerate(zip(non_tophat_binary_warped_images,\
                  tophat_binary_warped_images,\
                  original_images)):
    for j, image in enumerate(images):
        ax = fig.add_subplot(gs[i,j])
        ax.imshow(image, cmap = 'gray')
gs.tight_layout(fig)
plt.show()
In [116]:
# Seems like the non_tophat_binary_warped_images looks better
binary_warped = warper(get_non_tophat_binary_image(sample_image), src, dst)
plt.title("Binary warped image")
plt.imshow(binary_warped, cmap = 'gray')
Out[116]:
<matplotlib.image.AxesImage at 0x1401d6e80>

Finding the lane lines

In [117]:
# Get the histogram of the pixel values for the bottom half of the image
hist = np.sum(binary_warped[binary_warped.shape[0]/2:,:], axis = 0)
plt.plot(hist)
/Users/kevinlu/miniconda3/envs/carnd-term1/lib/python3.5/site-packages/ipykernel/__main__.py:2: VisibleDeprecationWarning: using a non-integer number instead of an integer will result in an error in the future
  from ipykernel import kernelapp as app
Out[117]:
[<matplotlib.lines.Line2D at 0x12cb90c88>]
In [118]:
# Convert the image to RGB
out_image = (np.dstack((binary_warped, binary_warped, binary_warped)) * 255).astype(np.uint8)
print(out_image.shape)
print(out_image[0:1])
plt.imshow(out_image)
(720, 1280, 3)
[[[0 0 0]
  [0 0 0]
  [0 0 0]
  ..., 
  [0 0 0]
  [0 0 0]
  [0 0 0]]]
Out[118]:
<matplotlib.image.AxesImage at 0x143e3d080>
In [119]:
# Find the base of the lines
midpoint = np.int(UNWARPED_SIZE[0]/2)
base_left_x = np.argmax(hist[:midpoint])
base_right_x = np.argmax(hist[midpoint:]) + midpoint
print("Base of left lane is: {0}\nBase of right lane is: {1}"\
      .format(base_left_x, base_right_x))
Base of left lane is: 404
Base of right lane is: 888
In [120]:
# Get the height for the sliding window
n_windows = 9
window_height = np.int(binary_warped.shape[0]/n_windows)
In [121]:
# Identify the x and y positions of all non-zero pixels in the image
# These values are stored in nonzeroy and nonzerox, so the first element of nonzerox, and nonzeroy would give the 
# first coordinate that has a nonzero
nonzero = binary_warped.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
In [122]:
# Current positions
current_left_x = base_left_x
current_right_x = base_right_x

# Set the width of the windows
width = 50

# Set the minimum number of pixels found to recenter the window
minpix = 50

# Create empty lists to receive the left and right lane pixel indcies
left_lane_idx = []
right_lane_idx = []
In [123]:
# Step through the windows one by one
for window in range(n_windows):
    # Height for the windows
    win_y_low = binary_warped.shape[0] - (window + 1) * window_height
    win_y_high = binary_warped.shape[0] - window * window_height
    
    # starting and ending x coordinates of the window for the left lane line
    win_x_left_low = current_left_x - width
    win_x_left_high = current_left_x + width
    
    # starting and ending x coordinates of the window for the right lane line
    win_x_right_low = current_right_x - width
    win_x_right_high = current_right_x + width
    
    # Draw green rectangles on the RGB transformed image
    cv2.rectangle(out_image, (win_x_left_low, win_y_low), (win_x_left_high, win_y_high), (0, 255, 0), 2)
    cv2.rectangle(out_image,(win_x_right_low,win_y_low),(win_x_right_high,win_y_high),(0,255,0), 2) 

    # Identify the indexes of nonzero pixels in the nonzerox, and nonzeroy arrays
    # note that the index will have all the combinations of x, and y coordinates that are non-zero
    # within our specified window
    good_left_idx = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) & \
                     (nonzerox >= win_x_left_low) & (nonzerox < win_x_left_high)).nonzero()[0]
    good_right_idx = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) & \
                      (nonzerox >= win_x_right_low) & (nonzerox < win_x_right_high)).nonzero()[0]
    
#     print(good_left_idx)
#     print(good_right_idx)
    
    # Append these indices to the lists
    left_lane_idx.append(good_left_idx)
    right_lane_idx.append(good_right_idx)
    
    if len(good_left_idx) > minpix:
        current_left_x = np.int(np.mean(nonzerox[good_left_idx]))
    if len(good_right_idx) > minpix:
        current_right_x = np.int(np.mean(nonzerox[good_right_idx]))
In [124]:
# Concatenate the arrays of indices which represent the nonzero elements int he 

left_lane_idx = np.concatenate(left_lane_idx)
right_lane_idx = np.concatenate(right_lane_idx)

# Extract left and right line pixel positions
left_x = nonzerox[left_lane_idx]
left_y = nonzeroy[left_lane_idx] 
right_x = nonzerox[right_lane_idx]
right_y = nonzeroy[right_lane_idx] 
In [125]:
plt.imshow(out_image)
Out[125]:
<matplotlib.image.AxesImage at 0x12e3252b0>
In [126]:
# Convert from pixel space to meters
Y_M_PER_PIXEL = 30/720 # 720 is the height of the image
X_M_PER_PIXEL = 3.7/700 # 700 is the width of the lane in pixel 

def find_poly_fit_with_window(binary_warped):
    # Get the histogram of the pixel values for the bottom half of the image
    hist = np.sum(binary_warped[binary_warped.shape[0]/2:,:], axis = 0)
    
    # Turn the binary image into RGB
    out_image = (np.dstack((binary_warped, binary_warped, binary_warped)) * 255).astype(np.uint8)
    
    # Find the base of the lines
    midpoint = np.int(UNWARPED_SIZE[0]/2)
    base_left_x = np.argmax(hist[:midpoint])
    base_right_x = np.argmax(hist[midpoint:]) + midpoint

    # Get the height for the sliding window
    n_windows = 9
    window_height = np.int(binary_warped.shape[0]/n_windows)

    # The some_array.nonzero() functions return d lists where d is the number of dimensions in the some_array
    # Use this to identify the x and y positions of all non-zero pixels in the image
    
    # These values are stored in nonzeroy and nonzerox, so the first element of nonzerox, and nonzeroy would give the 
    # first coordinate that has a nonzero pixel i.e.the pixel is not black
    nonzero = binary_warped.nonzero() # returns 2 lists of the same length, each list contains the indexes of nonzero elements
    nonzeroy = np.array(nonzero[0])
    nonzerox = np.array(nonzero[1])
    

    # Current positions
    current_left_x = base_left_x
    current_right_x = base_right_x

    # Set the width of the windows
    width = 100

    # Set the minimum number of pixels found to recenter the window
    minpix = 50

    # Create empty lists to receive the left and right lane pixel indcies
    left_lane_idx = []
    right_lane_idx = []

    # Step through the windows one by one
    for window in range(n_windows):
        # Height for the windows
        win_y_low = binary_warped.shape[0] - (window + 1) * window_height
        win_y_high = binary_warped.shape[0] - window * window_height

        # starting and ending x coordinates of the window for the left lane line
        win_x_left_low = current_left_x - width
        win_x_left_high = current_left_x + width

        # starting and ending x coordinates of the window for the right lane line
        win_x_right_low = current_right_x - width
        win_x_right_high = current_right_x + width

        # Draw green rectangles on the RGB transformed image
        cv2.rectangle(out_image, (win_x_left_low, win_y_low), (win_x_left_high, win_y_high), (0, 255, 0), 2)
        cv2.rectangle(out_image, (win_x_right_low, win_y_low), (win_x_right_high, win_y_high), (0, 255, 0), 2) 

        # Identify the indexes of nonzero pixels in the nonzerox, and nonzeroy arrays
        # note that the index will all the combinations of x, and y coordinates that are non-zero
        
        # As some_array >= some_condition returns a boolean array we can & these arrays together, and
        # find the indexes of the nonzeroy, and nonzerox arrays that satisfy these conditions, noting
        # that the nonzery and nonzerox arrays themselves are the actual coordinates of the pixels that
        # aren't 0. Hence, we are able to find the coordinates in the original image that aren't black and 
        # also in our specfied window

        good_left_idx = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) & \
                         (nonzerox >= win_x_left_low) & (nonzerox < win_x_left_high)).nonzero()[0]
        good_right_idx = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) & \
                          (nonzerox >= win_x_right_low) & (nonzerox < win_x_right_high)).nonzero()[0]
        
        # Append all the indexes that are non-zero within the window into an array
        # so that they can be used for coloring later
        left_lane_idx.append(good_left_idx)
        right_lane_idx.append(good_right_idx)
        
        # If there are enough pixels in this window, then move the window to the middle of those pixels
        if len(good_left_idx) > minpix:
            current_left_x = np.int(np.mean(nonzerox[good_left_idx]))
        if len(good_right_idx) > minpix:
            current_right_x = np.int(np.mean(nonzerox[good_right_idx]))
            
    # Concatenate the arrays of indices, as it is a list of lists due to 
    # appending good_left_idx/good_right_idx to the array
    left_lane_idx = np.concatenate(left_lane_idx)
    right_lane_idx = np.concatenate(right_lane_idx)

    # Extract left and right line pixel positions
    left_x = nonzerox[left_lane_idx]
    left_y = nonzeroy[left_lane_idx] 
    right_x = nonzerox[right_lane_idx]
    right_y = nonzeroy[right_lane_idx] 
    
    # Fit a second order polynomial to each
    left_fit = np.polyfit(left_y, left_x, 2)
    right_fit = np.polyfit(right_y, right_x, 2)
    
    # Extract left and right line pixel positions
    left_x_m = nonzerox[left_lane_idx] * X_M_PER_PIXEL
    left_y_m = nonzeroy[left_lane_idx] * Y_M_PER_PIXEL
    right_x_m = nonzerox[right_lane_idx] * X_M_PER_PIXEL
    right_y_m = nonzeroy[right_lane_idx] * Y_M_PER_PIXEL 

    # Fit a second order polynomial to each
    left_fit_m = np.polyfit(left_y_m, left_x_m, 2)
    right_fit_m = np.polyfit(right_y_m, right_x_m, 2)
    
    return(left_fit, right_fit, left_fit_m, right_fit_m, left_lane_idx, right_lane_idx)
In [127]:
# Test our function
left_fit, right_fit, left_fit_m, right_fit_m, left_lane_idx, right_lane_idx = find_poly_fit_with_window(binary_warped) 
plt.imshow(binary_warped, cmap = 'gray')
/Users/kevinlu/miniconda3/envs/carnd-term1/lib/python3.5/site-packages/ipykernel/__main__.py:7: VisibleDeprecationWarning: using a non-integer number instead of an integer will result in an error in the future
Out[127]:
<matplotlib.image.AxesImage at 0x12d4584e0>
In [128]:
def find_poly_fit_after_initial_window(binary_warped, left_fit, right_fit):
    """
    Takes a binary warped that is perspective transformed which the polynomial
    fitted lane lines and outputs the new coefficients for the left and right lane lines
    
    :::params binary_warped: a perspective transformed binary image 
    
              left_fit: the coefficients of the polynomial fit for the left lane line
              
              right_fit: the coefficients of the polynomial fit for the right lane line
    """
    # Get the coordinates of the non-zero pixels
    nonzero = binary_warped.nonzero()
    nonzeroy = np.array(nonzero[0])
    nonzerox = np.array(nonzero[1])
    margin = 100
    
    # Get the left and right lane indexes
    left_lane_idx = ((nonzerox > (left_fit[0]*(nonzeroy**2) + \
                                   left_fit[1]*nonzeroy + left_fit[2] - margin))
                      & (nonzerox < (left_fit[0]*(nonzeroy**2) + \
                                     left_fit[1]*nonzeroy + left_fit[2] + margin))) 
    right_lane_idx = ((nonzerox > (right_fit[0]*(nonzeroy**2) + \
                                    right_fit[1]*nonzeroy + right_fit[2] - margin)) &
                       (nonzerox < (right_fit[0]*(nonzeroy**2) + \
                                    right_fit[1]*nonzeroy + right_fit[2] + margin)))  
    
    # Extract left and right line pixel positions
    left_x = nonzerox[left_lane_idx]
    left_y = nonzeroy[left_lane_idx] 
    right_x = nonzerox[right_lane_idx]
    right_y = nonzeroy[right_lane_idx] 
    
    # Fit a second order polynomial to each
    left_fit = np.polyfit(left_y, left_x, 2)
    right_fit = np.polyfit(right_y, right_x, 2)
    
    # Extract left and right line pixel positions
    left_x_m = nonzerox[left_lane_idx] * X_M_PER_PIXEL
    left_y_m = nonzeroy[left_lane_idx] * Y_M_PER_PIXEL
    right_x_m = nonzerox[right_lane_idx] * X_M_PER_PIXEL
    right_y_m = nonzeroy[right_lane_idx] * Y_M_PER_PIXEL 

    # Fit a second order polynomial to each
    left_fit_m = np.polyfit(left_y_m, left_x_m, 2)
    right_fit_m = np.polyfit(right_y_m, right_x_m, 2)
    
    return(left_fit, right_fit, left_fit_m, right_fit_m, left_lane_idx, right_lane_idx)

Plotting the lane lines

In [129]:
# Fit a second order polynomial to each
left_fit = np.polyfit(left_y, left_x, 2)
right_fit = np.polyfit(right_y, right_x, 2)

# Visualize the results
plot_y = np.linspace(0, binary_warped.shape[0]-1, binary_warped.shape[0])
left_fit_x = left_fit[0] * plot_y ** 2 + left_fit[1] * plot_y + left_fit[2]
right_fit_x = right_fit[0] * plot_y ** 2 + right_fit[1] * plot_y + right_fit[2]

# Color the left and right lane lines
out_image[nonzeroy[left_lane_idx], nonzerox[left_lane_idx]] = [255, 0, 0]
out_image[nonzeroy[right_lane_idx], nonzerox[right_lane_idx]] = [0, 0 ,255]

# Plot the image with the lane lines
plt.imshow(out_image)
plt.plot(left_fit_x, plot_y, color='yellow')
plt.plot(right_fit_x, plot_y, color='yellow')
plt.xlim(0, 1280)
plt.ylim(720, 0)
Out[129]:
(720, 0)
In [130]:
def get_fitted_lines(binary_warped, left_fit, right_fit):
    
    # Get data needed for visualization
    plot_y = np.linspace(0, binary_warped.shape[0]-1, binary_warped.shape[0])
    left_fit_x = left_fit[0] * plot_y ** 2 + left_fit[1] * plot_y + left_fit[2]
    right_fit_x = right_fit[0] * plot_y ** 2 + right_fit[1] * plot_y + right_fit[2]
    return(plot_y, left_fit_x, right_fit_x)
    
 
def plot_fitted_lane_lines(binary_warped, plot_y, left_fit_x, right_fit_x, left_lane_idx, right_lane_idx):
    
    # Get the fitted coordinates
    plot_y, left_fit_x, right_fit_x = get_fitted_lines(binary_warped, left_fit, right_fit)
    
    # Create the RGB image
    out_image = (np.dstack((binary_warped, binary_warped, binary_warped)) * 255).astype(np.uint8)
    nonzero = binary_warped.nonzero()
    nonzeroy = np.array(nonzero[0])
    nonzerox = np.array(nonzero[1])
    
    # Color the left and right lane lines
    out_image[nonzeroy[left_lane_idx], nonzerox[left_lane_idx]] = [255, 0, 0]
    out_image[nonzeroy[right_lane_idx], nonzerox[right_lane_idx]] = [0, 0 ,255]

    # Plot the image with the lane lines
    plt.imshow(out_image)
    plt.title("Binary warped fitted lines")
    plt.plot(left_fit_x, plot_y, color='yellow')
    plt.plot(right_fit_x, plot_y, color='yellow')
    plt.xlim(0, 1280)
    plt.ylim(720, 0)
In [131]:
# Test our function for plotting
plot_y, left_fit_x, right_fit_x = get_fitted_lines(binary_warped, left_fit, right_fit)
plot_fitted_lane_lines(binary_warped, plot_y, left_fit_x, right_fit_x, left_lane_idx, right_lane_idx)

Finding the curvature

In [132]:
# Define y-value where we want radius of curvature
# Choose the max y value (which corresponds to the bottom fo the image)
def find_curvature(y_eval, fit):
    curvature = ((1 + (2*fit[0]*y_eval + fit[1])**2)**1.5) / np.absolute(2*fit[0])
    return(curvature)

y_eval = np.max(plot_y)
print(binary_warped.shape[0]-1)
left_curve_rad = find_curvature(y_eval, left_fit) 
right_curve_rad = find_curvature(y_eval, right_fit)
print(left_curve_rad, right_curve_rad)
719
9013.31864971 27482.3353914
In [133]:
# Fit new polynomials to x,y in world space

# First convert the nonzero points in pixel space to world space
nonzero = binary_warped.nonzero()
nonzeroy = nonzero[0]
nonzerox = nonzero[1]

# Extract left and right line pixel positions
left_x_m = nonzerox[left_lane_idx] * X_M_PER_PIXEL
left_y_m = nonzeroy[left_lane_idx] * Y_M_PER_PIXEL
right_x_m = nonzerox[right_lane_idx] * X_M_PER_PIXEL
right_y_m = nonzeroy[right_lane_idx] * Y_M_PER_PIXEL 

# Fit a second order polynomial to each
left_fit_m = np.polyfit(left_y_m, left_x_m, 2)
right_fit_m = np.polyfit(right_y_m, right_x_m, 2)

# Find the curvature
y_eval_m = y_eval * Y_M_PER_PIXEL
left_curve_rad_m = find_curvature(y_eval_m, left_fit_m)
right_curve_rad_m = find_curvature(y_eval_m, right_fit_m)
print(left_curve_rad_m, right_curve_rad_m)
3002.2076836 79866.0440547

Finding the offset

In [134]:
def find_offset(left_fit_x, right_fit_x):
    left_line_base_x = left_fit_x[-1]
    right_line_base_x = right_fit_x[-1]
    lane_midpoint_x = (left_line_base_x + right_line_base_x)/2.0
    car_x = 1280/2
    return((car_x - lane_midpoint_x) * X_M_PER_PIXEL)

find_offset(left_fit_x, right_fit_x)
Out[134]:
-0.037353218736630472

Filling in the lane lines

In [135]:
# Drawing the lines back down onto the road

# First create an image to draw on
wrap_zero = np.zeros_like(binary_warped).astype(np.uint8)
color_warp = np.dstack((wrap_zero, wrap_zero, wrap_zero))
In [136]:
# Recast the x and y points into usable format for cv2.fillPoly()
pts_left = np.array([np.transpose(np.vstack([left_fit_x, plot_y]))]) # or use np.dstack instead of np.transpose
pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fit_x, plot_y])))])
pts = np.hstack((pts_left, pts_right))
In [137]:
# Draw the lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0, 255,0))
new_warp = warper(color_warp, dst, src)
In [138]:
# Plot final result 
result = cv2.addWeighted(sample_image.get_RGB(), 1, new_warp, 0.3, 0)
plt.imshow(result)
Out[138]:
<matplotlib.image.AxesImage at 0x12cc42940>
In [139]:
def fill_lane_lines(original_image, binary_warped, plot_y, left_fit_x, right_fit_x, dst, src):
    
    # First create a blank image
    warp_zero = np.zeros_like(binary_warped).astype(np.uint8)
    color_warped = np.dstack((warp_zero, warp_zero, warp_zero))
    
    # Now format the data in a way that cv2.polyfill works
    pts_left = np.array([np.transpose(np.vstack([left_fit_x, plot_y]))]) # or use np.dstack instead of np.transpose
    pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fit_x, plot_y])))])
    pts = np.hstack((pts_left, pts_right)) 
    
    # Draw the lane onto the warped blank image
    cv2.fillPoly(color_warped, np.int_([pts]), (0, 255,0))
    new_warp = warper(color_warped, dst, src)
    
    result = cv2.addWeighted(original_image, 1, new_warp, 0.3, 0)
    return(result)
In [140]:
# Test to see that the function works
plt.imshow(fill_lane_lines(sample_image.get_RGB(), binary_warped, plot_y, left_fit_x, right_fit_x, dst, src))
Out[140]:
<matplotlib.image.AxesImage at 0x12c7d1128>

The final image pipeline

First get the distortion matrix for camera

In [141]:
CALIBRATION_FOLDER_NAME = "camera_cal"

# image paths for calibration images
calibration_image_paths = glob.glob(os.path.join(CALIBRATION_FOLDER_NAME, "calibration*.jpg"))

# image path for test image
test_image_path = os.path.join(CALIBRATION_FOLDER_NAME, "test_for_calibration.jpg")

# For the parameters for the calibration
mtx, dist = calibrate_camera(calibration_image_paths, test_image_path, 9, 6)

Define the image to be used

In [142]:
image = lane_line_images[0]
sample_image = ImageWrapper(image, ColorSpace.RGB)
plt.imshow(image)
Out[142]:
<matplotlib.image.AxesImage at 0x129e690b8>

Now extract the lane lines

In [143]:
binary_warped = warper(get_non_tophat_binary_image(sample_image), src, dst)
binary_warped[:,0:200] = 0
binary_warped[:,1080:1280] = 0
plt.imshow(binary_warped, cmap = 'gray')
Out[143]:
<matplotlib.image.AxesImage at 0x129ed0908>

Get the coefficients for the lane line fits

In [144]:
left_fit, right_fit, right_fit_m, left_fit_m, left_lane_idx, right_lane_idx = find_poly_fit_with_window(binary_warped) 
/Users/kevinlu/miniconda3/envs/carnd-term1/lib/python3.5/site-packages/ipykernel/__main__.py:7: VisibleDeprecationWarning: using a non-integer number instead of an integer will result in an error in the future

Find the curvature

In [145]:
# Convert from pixel space to meters
Y_M_PER_PIXEL = 30/720 # 720 is the height of the image
X_M_PER_PIXEL = 3.7/700 # 700 is the width of the lane in pixel 

y_eval = float(binary_warped.shape[0]-1.0) * Y_M_PER_PIXEL
left_curvature = find_curvature(y_eval, left_fit_m)
right_curvature = find_curvature(y_eval, right_fit_m)

print(left_curvature, right_curvature)
20992.2146038 25194.1935439

Find the offset

In [146]:
offset = find_offset(left_fit_x, right_fit_x)
offset = np.round(offset, 2)
if offset < 0:
    offset_text = str(offset) + " meters left of the center of lane"
else:
    offset_text = str(offset) + " meters right of the center of lane"
print(offset)
print(offset_text)
-0.04
-0.04 meters left of the center of lane

Filling in the lane lines

In [147]:
filled_lane_line_image = fill_lane_lines(sample_image.get_RGB(), binary_warped, plot_y, left_fit_x, right_fit_x, dst, src)

# Write some Text
font = cv2.FONT_HERSHEY_SIMPLEX
cv2.putText(filled_lane_line_image,'Offset is: {0}'.format(offset_text),(50,50), font, 1,(255,255,255),2)
if (left_curvature > 10000) or (right_curvature > 10000):
    cv2.putText(filled_lane_line_image,'Left curvature is: STRAIGHT',(50,100), font, 1,(255,255,255),2)
    cv2.putText(filled_lane_line_image,'Right curvature is: STRAIGHT',(50,150), font, 1,(255,255,255),2)
else:
    cv2.putText(filled_lane_line_image,'Left curvature is: {0}'.format(left_curvature),(50,100), font, 1,(255,255,255),2)
    cv2.putText(filled_lane_line_image,'Right curvature is: {0}'.format(right_curvature),(50,150), font, 1,(255,255,255),2)
    
plt.imshow(filled_lane_line_image)
Out[147]:
<matplotlib.image.AxesImage at 0x14d3002b0>
In [148]:
# Define a helper class for us to keep track of the lines

# Convert from pixel space to meters
Y_M_PER_PIXEL = 30/720 # 720 is the height of the image
X_M_PER_PIXEL = 3.7/700 # 700 is the width of the lane in pixel 

class Line():
    def __init__(self, n):
        # was the line detected in the last iteration?
        self.detected = False  
        
        self.num_times_did_not_detect = 0
        
        self.num_threshold_for_initial_search = 5
        
        # x values of the last n fits of the line
        self.recent_x_fitted = [] 
        
        # polynomial coefficients of the last n fits of the line
        self.recent_fit = []
        self.recent_fit_m = []
        
        # polynomial coefficients averaged over the last n iterations
        self._best_fit = None  
        self._best_fit_m = None
        
        # setting the n value
        self.n = n
        
        # polynomial coefficients for the most recent fit
        self.current_fit = []  
        self.current_fit_m = []
        
        # radius of curvature of the line in some units
        self.radius_of_curvature = None 
        
        # distance in meters of vehicle center from the line
        self.line_base_pos = None 
        
        # difference in fit coefficients between last and new fits
        self.diffs = np.array([0,0,0], dtype='float') 
        
        # indexes for the nonzero elements that determine the lane line
        self.non_zero_index = []
    
    @property # equivalent to best_fit = property(best_fit) which is setting the getter 
    def best_fit(self):
#         print("get best fit value")
        self._calculate_best_fit()
#         print(self._best_fit)
        return(self._best_fit)

    def _calculate_best_fit(self):
#         print("calculating best fit")
        fit_coefficients = np.array(self.recent_fit)
        self._best_fit = (np.mean(fit_coefficients, axis = 0))
    
    @property
    def best_fit_m(self):
#         print("get best fit value in meters")
        self._calculate_best_fit_m()
#         print(self._best_fit_m)
        return(self._best_fit_m)
    
    def _calculate_best_fit_m(self):
#         print("calculating best fit value in meters")
        fit_coefficients_m = np.array(self.recent_fit_m)
        self._best_fit_m = (np.mean(fit_coefficients_m, axis = 0))
    
    def did_detect(self):
        self.num_times_did_not_detect = 0
        
    def did_not_detect(self):
        self.num_times_did_not_detect += 1
        
    def determine_if_detected(self):
        if self.num_times_did_not_detect < self.num_threshold_for_initial_search:
            return(True)
        else:
            return(False)
        
    def append_recent_x_fitted(self, x_fit):
        if len(self.recent_x_fitted) >= self.n:
            self.recent_x_fitted.pop(0)
        self.recent_x_fitted.append(x_fit)
    
    def append_recent_fit(self, fit):
        if len(self.recent_fit) >= self.n:
            self.recent_fit.pop(0)
        self.recent_fit.append(fit)
        self.current_fit = fit
    
    def append_recent_fit_m(self, fit_m):
        if len(self.recent_fit) >= self.n:
            self.recent_fit_m.pop(0)
        self.recent_fit_m.append(fit_m)
        self.current_fit_m = fit_m
        
    def append_non_zero_index(self, indexes):
        if len(self.non_zero_index) >= self.n:
             self.non_zero_index.pop(0)
        self.non_zero_index.append(indexes)
        
    def _delete_most_recent_x_fitted(self):
        self.recent_x_fitted.pop()
    
    def _delete_most_recent_fit(self):
        self.recent_fit.pop()
        
    def _delete_most_recent_non_zero_index(self):
        self.non_zero_index.pop()
        
    def delete_data_from_last_frame(self):
        self._delete_most_recent_x_fitted()
        self._delete_most_recent_fit()
        self._delete_most_recent_non_zero_index()
        
    def check_coefficients(self):
        return
#         print(self.diffs)
    
    def _calculate_coefficient_diff(self):
        previous_coefficients = np.array(recent_fit[-2])
        current_coefficients = np.array(self.current_fit)
        self.diffs = current_coefficients - previous_coefficients
    
    
In [153]:
n = 10
left_lane = Line(n)
right_lane = Line(n)

def process_image_detect_lane_line(image):
    # Wrap the image for easier manipulation
    sample_image = ImageWrapper(image, ColorSpace.RGB)
    
    # Do the binary transform
    binary_warped = warper(get_non_tophat_binary_image(sample_image), src, dst)
    binary_warped[:,0:200] = 0
    binary_warped[:,1080:1280] = 0
    
    # Check if we need to do the initial search again
    if (len(left_lane.recent_fit) == 0 and len(right_lane.recent_fit) == 0\
        and (left_lane.detected == False) and (right_lane.detected == False)):
        print("DIDN'T FIND WINDOW DOING INITIAL SEARCH AGAIN!")
        # Find the left and right fits, and also their associated indexes
        left_fit, right_fit, left_fit_m, right_fit_m,_,_ = find_poly_fit_with_window(binary_warped) 
    else:
        # In the case that we don't have to do the initial search use the previous fits
        # to determine the current fit
        recent_left_fit = left_lane.recent_fit[-1]
        recent_right_fit = right_lane.recent_fit[-1] 
        left_fit, right_fit, left_fit_m, right_fit_m,_,_ = find_poly_fit_after_initial_window(binary_warped, \
                                                                                         recent_left_fit, \
                                                                                         recent_right_fit) 

#     # Calculate the values needed to check if the lines that were found are good
    plot_y, left_fit_x, right_fit_x = get_fitted_lines(binary_warped, left_fit, right_fit)

    if ((check_lane_width(left_fit_x, right_fit_x) == False) or (check_parallel(left_fit_x, right_fit_x) == False)):
        print("Lanes weren't detected")
        # If the lanes detected aren't good then don't append the data to the lane
        left_lane.detected = False
        right_lane.detected = False
        
        if(len(left_lane.recent_fit) == 0 and len(right_lane.recent_fit) == 0):
            return(sample_image.get_RGB())
    else:
        print("Lanes were detected")
        # If the lanes that detected are value lanes, then append them to the data
        left_lane.detected = True
        right_lane.detected = True
        
        # Append the new fits
        left_lane.append_recent_fit(left_fit)
        right_lane.append_recent_fit(right_fit)
        left_lane.append_recent_fit_m(left_fit_m)
        right_lane.append_recent_fit_m(right_fit_m)
        
        # Calculate the x values using the best fit
        plot_y, left_fit_x, right_fit_x = get_fitted_lines(binary_warped, left_lane.best_fit, right_lane.best_fit)
        
        # Add the calculated values
        left_lane.append_recent_x_fitted(left_fit_x)
        right_lane.append_recent_x_fitted(right_fit_x)
    
    # Calculate the curvatures
    y_eval = float(binary_warped.shape[0]-1.0) * Y_M_PER_PIXEL
    left_curvature = find_curvature(y_eval, left_lane.best_fit_m)
    right_curvature = find_curvature(y_eval, right_lane.best_fit_m)
    
    # Find the offset
    recent_left_lane_x_fitted = left_lane.recent_x_fitted[-1]
    recent_right_lane_x_fitted = right_lane.recent_x_fitted[-1]
    offset = find_offset(recent_left_lane_x_fitted, recent_right_lane_x_fitted)
    offset = np.round(offset, 2)
    
    if offset < 0:
        offset_text = str(abs(offset)) + " meters left of the center of lane"
    else:
        offset_text = str(abs(offset)) + " meters right of the center of lane"
        
    # Fill in the lane line
    filled_lane_line_image = fill_lane_lines(sample_image.get_RGB(), binary_warped, plot_y, \
                                            recent_left_lane_x_fitted, recent_right_lane_x_fitted,\
                                             dst, src)
    
    # Write some Text
    font = cv2.FONT_HERSHEY_SIMPLEX 
    cv2.putText(filled_lane_line_image,'Offset is: {0}'.format(offset_text),(50,50), font, 1,(255,255,255),2)
    
    if (left_curvature > 10000) or (right_curvature > 10000):     
        cv2.putText(filled_lane_line_image,'Left curvature is: STRAIGHT',(50,100), font, 1,(255,255,255),2)
        cv2.putText(filled_lane_line_image,'Right curvature is: STRAIGHT',(50,150), font, 1,(255,255,255),2)
    else:
        cv2.putText(filled_lane_line_image,'Left curvature is: {0}'.format(left_curvature),(50,100), font, 1,(255,255,255),2)
        cv2.putText(filled_lane_line_image,'Right curvature is: {0}'.format(right_curvature),(50,150), font, 1,(255,255,255),2)
        
    return(filled_lane_line_image) 

def check_lane_width(left_fit_x, right_fit_x):
    lane_width = find_lane_width(left_fit_x, right_fit_x)
#     print("lane_width is {0}".format(lane_width))
    if (2 < lane_width < 4):
        return(True)
    else:
        return(False)

def find_lane_width(left_fit_x, right_fit_x, in_meters = True):
#     print(left_fit_x)
    left_lane_base_x = left_fit_x[-1]
    right_lane_base_x = right_fit_x[-1]
    if in_meters:
        return((right_lane_base_x - left_lane_base_x) * X_M_PER_PIXEL) 
    else:
        return(right_lane_base_x - left_lane_base_x)
    
    
def check_parallel(left_fit_x, right_fit_x):
    initial_width = find_lane_width(left_fit_x, right_fit_x)
    for i in range(len(left_fit_x)):
        diff = (right_fit_x[i] - left_fit_x[i]) * X_M_PER_PIXEL
        if abs(diff-initial_width) > 0.5:
            return(False)
    return(True)
In [154]:
processed_image = process_image_detect_lane_line(lane_line_images[0])
plt.imshow(processed_image)
DIDN'T FIND WINDOW DOING INITIAL SEARCH AGAIN!
Lanes were detected
Out[154]:
<matplotlib.image.AxesImage at 0x15772e470>
In [ ]:
# Import everything needed to edit/save/watch video clips
from moviepy.editor import VideoFileClip
from IPython.display import HTML
project_video_output = 'project_video_result.mp4'
clip1 = VideoFileClip("project_video.mp4")
project_video_clip = clip1.fl_image(process_image_detect_lane_line) #NOTE: this function expects color images!!
%time project_video_clip.write_videofile(project_video_output, audio=False)

Vehicle Detection and Tracking

In [3]:
# Import required libraries
from skimage.feature import hog
from sklearn.model_selection import train_test_split
import matplotlib.image as mpimg
import numpy as np
import cv2
from sklearn import svm
from sklearn.preprocessing import StandardScaler
In [4]:
# Read in the training data 
vehicles = glob.glob("vehicles/**/*.png")
non_vehicles = glob.glob("non-vehicles/**/*.png")
In [5]:
# Plot out a car and non car result
read_image = lambda image_path : (mpimg.imread(image_path) * 255).astype(np.uint8)
sample_vehicle = read_image(vehicles[0])
sample_nonvehicle = read_image(non_vehicles[0])

# Create the plot
f, (ax1, ax2) = plt.subplots(1, 2, figsize = (15, 30))
ax1.imshow(sample_vehicle)
ax1.set_title("Car", fontsize = 30)
ax2.imshow(sample_nonvehicle)
ax2.set_title("Non-Car", fontsize = 30)
Out[5]:
<matplotlib.text.Text at 0x120e77e48>

Extract features from images using various techniques

In [6]:
# Define a function to return HOG features and visualization
def get_hog_features(img, orient, pix_per_cell, cell_per_block, 
                        vis=False, feature_vec=True):
    # Call with two outputs if vis==True
    if vis == True:
        features, hog_image = hog(img, orientations=orient, 
                                  pixels_per_cell=(pix_per_cell, pix_per_cell),
                                  cells_per_block=(cell_per_block, cell_per_block), 
                                  transform_sqrt=True, 
                                  visualise=vis, feature_vector=feature_vec)
        return features, hog_image
    # Otherwise call with one output
    else:      
        features = hog(img, orientations=orient, 
                       pixels_per_cell=(pix_per_cell, pix_per_cell),
                       cells_per_block=(cell_per_block, cell_per_block), 
                       transform_sqrt=True, 
                       visualise=vis, feature_vector=feature_vec)
        return features
In [9]:
# Test the function for both return types
orient = 9 # setting the number of bins so 9 bins would mean each bin is 40 degrees
pix_per_cell = 8 # setting the number of pixels that make up a cell, these cells are how the hog features are created
cell_per_block = 2 # this specifies the width/length of a square kernel


features_vehicle, hog_image_vehicle = get_hog_features(sample_vehicle[:,:,0], orient, pix_per_cell, cell_per_block, vis = True)
print("features.shape for feature_vector = True is: {}".format(features_vehicle.shape)) 

# as our picture is 64 x 64, we would have 64/8 = 8 cells on each side, so a total of 64 cells
# As each block contains 2 cells on each side or 4 cells in total there would be 2x2x9 features per block
# Therefore using a stride of 1, we would get 7 steps on either side of the image yielding 7x7x2x2x9 features
features_nonvehicle, hog_image_nonvehicle = get_hog_features(sample_nonvehicle[:,:,0], orient, pix_per_cell, cell_per_block, vis = True, feature_vec = False)
print("features.shape for feature_vector = False is: {}".format(features_nonvehicle.shape)) 
features.shape for feature_vector = True is: (1764,)
features.shape for feature_vector = False is: (7, 7, 2, 2, 9)
In [11]:
# Plot result
f, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, figsize = (30, 15))
ax1.imshow(sample_vehicle[:,:,0], cmap = 'gray')
ax1.set_title("Vehicle R channel")
ax2.imshow(hog_image_vehicle)
ax2.set_title("Vehicle R channel HOG")
ax3.imshow(sample_nonvehicle[:,:,0], cmap = 'gray')
ax3.set_title("Non-Vehicle R channel")
ax4.imshow(hog_image_nonvehicle)
ax4.set_title("Non-Vehicle R channel HOG")
Out[11]:
<matplotlib.text.Text at 0x1235babe0>
In [12]:
# Define a function to compute binned color features  

def bin_spatial(img, size=(32, 32)):
    # Use cv2.resize().ravel() to create the feature vector
    features = cv2.resize(img, size).ravel() 
    # Return the feature vector
    return features
In [14]:
# Using the pure pixel values
spatial_features = bin_spatial(sample_vehicle)
print(spatial_features.shape)
(3072,)
In [17]:
# Define a function to compute color histogram features 
# NEED TO CHANGE bins_range if reading .png files with mpimg!
def color_hist(img, nbins=32, bins_range=(0, 256)):
    
    # Compute the histogram of the color channels separately
    channel1_hist = np.histogram(img[:,:,0], bins=nbins, range=bins_range)
    channel2_hist = np.histogram(img[:,:,1], bins=nbins, range=bins_range)
    channel3_hist = np.histogram(img[:,:,2], bins=nbins, range=bins_range)
    
    # Concatenate the histograms into a single feature vector
    hist_features = np.concatenate((channel1_hist[0], channel2_hist[0], channel3_hist[0]))
    
    # Return the individual histograms, bin_centers and feature vector
    return hist_features
In [19]:
# Using the binned pixel values
color_hist_features = color_hist(sample_vehicle)
print(color_hist_features.shape)
(96,)
In [20]:
# Define a function to extract features from a list of images
# Have this function call bin_spatial() and color_hist()
def extract_features(imgs, color_space='RGB', spatial_size=(32, 32),
                        hist_bins=32, orient=9, 
                        pix_per_cell=8, cell_per_block=2, hog_channel=0,
                        spatial_feat=True, hist_feat=True, hog_feat=True):
    # Create a list to append feature vectors to
    features = []
    # Iterate through the list of images
    for file in imgs:
        file_features = []
        # Read in each one by one
        image = (mpimg.imread(file) * 255).astype(np.uint8)
        # apply color conversion if other than 'RGB'
        if color_space != 'RGB':
            if color_space == 'HSV':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)
            elif color_space == 'LUV':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2LUV)
            elif color_space == 'HLS':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2HLS)
            elif color_space == 'YUV':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2YUV)
            elif color_space == 'YCrCb':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2YCrCb)
            elif color_space == 'LAB':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2LAB)
        else: 
            feature_image = np.copy(image)      

        if spatial_feat == True:
            spatial_features = bin_spatial(feature_image, size=spatial_size)
            file_features.append(spatial_features)
        if hist_feat == True:
            hist_features = color_hist(feature_image, nbins=hist_bins)
            file_features.append(hist_features)
        if hog_feat == True:
        # Call get_hog_features() with vis=False, feature_vec=True
            if hog_channel == 'ALL':
                hog_features = []
                for channel in range(feature_image.shape[2]):
                    hog_features.append(get_hog_features(feature_image[:,:,channel], 
                                        orient, pix_per_cell, cell_per_block, 
                                        vis=False, feature_vec=True))
                hog_features = np.ravel(hog_features)        
            else:
                hog_features = get_hog_features(feature_image[:,:,hog_channel], orient, 
                            pix_per_cell, cell_per_block, vis=False, feature_vec=True)
            # Append the new feature vector to the features list
            file_features.append(hog_features)
        features.append(np.concatenate(file_features))
    # Return list of feature vectors
    return features 

Train the Linear SVC

In [21]:
color_space = 'LAB' # Can be RGB, HSV, LUV, HLS, YUV, YCrCb, LAB
orient = 9  # HOG orientations
pix_per_cell = 8 # HOG pixels per cell
cell_per_block = 2 # HOG cells per block
hog_channel = 'ALL' # Can be 0, 1, 2, or "ALL"
spatial_size = (16, 16) # Spatial binning dimensions
hist_bins = 16    # Number of histogram bins
spatial_feat = True # Spatial features on or off
hist_feat = True # Histogram features on or off
hog_feat = True # HOG features on or off
In [22]:
def get_data(vehicles, non_vehicles, color_space, orient, pix_per_cell, \
             cell_per_block, hog_channel, spatial_size, hist_bins, \
             spatial_feat, hist_feat, hog_feat):
    vehicle_features = extract_features(vehicles, color_space=color_space, 
                        spatial_size=spatial_size, hist_bins=hist_bins, 
                        orient=orient, pix_per_cell=pix_per_cell, 
                        cell_per_block=cell_per_block, 
                        hog_channel=hog_channel, spatial_feat=spatial_feat, 
                        hist_feat=hist_feat, hog_feat=hog_feat)
    non_vehicle_features = extract_features(non_vehicles, color_space=color_space, 
                            spatial_size=spatial_size, hist_bins=hist_bins, 
                            orient=orient, pix_per_cell=pix_per_cell, 
                            cell_per_block=cell_per_block, 
                            hog_channel=hog_channel, spatial_feat=spatial_feat, 
                            hist_feat=hist_feat, hog_feat=hog_feat)
    X = np.vstack((vehicle_features, non_vehicle_features)).astype(np.float64)                        
    # Fit a per-column scaler
    X_scaler = StandardScaler().fit(X)
    # Apply the scaler to X
    scaled_X = X_scaler.transform(X)

    # Define the labels vector
    y = np.hstack((np.ones(len(vehicle_features)), np.zeros(len(non_vehicle_features))))

    return(scaled_X, y, X_scaler)
In [ ]:
# Test the function
X, y, X_scaler = get_data(vehicles, non_vehicles, color_space, orient, \
                         pix_per_cell, cell_per_block, hog_channel, \
                         spatial_size, hist_bins, spatial_feat, \
                         hist_feat, hog_feat)
In [ ]:
def train_svm(X, y):
    rand_state = np.random.randint(0, 100)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, random_state=rand_state)
    X_train, X_val, y_train,y_val = train_test_split(X_train, y_train, test_size = 0.2)
    print('Using:',orient,'orientations',pix_per_cell,
        'pixels per cell and', cell_per_block,'cells per block')
    print('Feature vector length:', len(X_train[0]))
    # Use a linear SVC 
    svc = svm.LinearSVC()
    # Check the training time for the SVC
    svc.fit(X_train, y_train)
    # Check the score of the SVC
    print('Validation Accuracy of SVC = ', round(svc.score(X_val, y_val), 4))
    print('Test Accuracy of SVC = ', round(svc.score(X_test, y_test), 4))
    return(svc) 
In [205]:
# Save the SVC and X_scaler
svc = train_svm(X, y)
model_info = {"model":svc, "scaler":X_scaler}
pickle.dump(model_info, open("model_{}.p".format(color_space), "wb"))
Using: 9 orientations 8 pixels per cell and 2 cells per block
Feature vector length: 6108
Validation Accuracy of SVC =  0.9919
Test Accuracy of SVC =  0.9918
In [28]:
def load_model(color_space):
    model_info = pickle.load(open("model_{}.p".format(color_space), "rb"))
    svc = model_info["model"]
    X_scaler = model_info["scaler"]
    return(svc, X_scaler)

Classify the boxes that were found and see if it is a car

In [29]:
def convert_color(image, color_space ='RGB'):
    if color_space != 'RGB':
        if color_space == 'HSV':
            feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)
        elif color_space == 'LUV':
            feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2LUV)
        elif color_space == 'HLS':
            feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2HLS)
        elif color_space == 'YUV':
            feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2YUV)
        elif color_space == 'YCrCb':
            feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2YCrCb)
        elif color_space == 'LAB':
            feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2LAB)
    else:
        feature_image = np.copy(image)
    return(feature_image)
In [30]:
def find_cars(img, y_start, y_stop, scale , model, X_scaler, orient,\
             pix_per_cell, cell_per_block, spatial_size, hist_bins, color_space,\
             hog_feat, spatial_feat, hist_feat):
    draw_img = np.copy(img)
    bboxes = []
    img_to_search = img[y_start:y_stop, :, :]
    color_space_to_search = convert_color(img_to_search, color_space = color_space)
    if scale != 1:
        # if scale > 1 the image becomes smaller
        # if scale < 1 the image becomes larger
        imshape = color_space_to_search.shape
        color_space_to_search = cv2.resize(color_space_to_search, (np.int(imshape[1]/scale), np.int(imshape[0]/scale)))
        
    # Get the color channels
    ch1 = color_space_to_search[:, :, 0]
    ch2 = color_space_to_search[:, :, 1]
    ch3 = color_space_to_search[:, :, 2]
    
    # Get the number of blocks in the image, so pix_per_cell number of pixels make up one cell/block
    nxblocks = (ch1.shape[1] // pix_per_cell) - 1
    nyblocks = (ch1.shape[0] // pix_per_cell) - 1
    
    n_feat_per_block = orient * cell_per_block ** 2
    
    # Partition the image 
    window = 64 # number of pixels for the sides of a square
    n_blocks_per_window = (window // pix_per_cell) - 1
    cells_per_step = 2 # how many blocks we move by
    nxsteps = (nxblocks - n_blocks_per_window) // cells_per_step
    nysteps = (nyblocks - n_blocks_per_window) // cells_per_step
    
    # Compute individual channel HOG features for the entire image
    hog1 = get_hog_features(ch1, orient, pix_per_cell, cell_per_block, feature_vec=False)
    hog2 = get_hog_features(ch2, orient, pix_per_cell, cell_per_block, feature_vec=False)
    hog3 = get_hog_features(ch3, orient, pix_per_cell, cell_per_block, feature_vec=False)
    
    for xb in range(nxsteps):
        for yb in range(nysteps):
            # See which "block" we are currently on
            ypos = yb * cells_per_step 
            xpos = xb * cells_per_step
            
            # Extract HOG for this patch
            if hog_feat:
                hog_feat1 = hog1[ypos:ypos+n_blocks_per_window, xpos:xpos+n_blocks_per_window].ravel() 
                hog_feat2 = hog2[ypos:ypos+n_blocks_per_window, xpos:xpos+n_blocks_per_window].ravel() 
                hog_feat3 = hog3[ypos:ypos+n_blocks_per_window, xpos:xpos+n_blocks_per_window].ravel() 
                hog_features = np.hstack((hog_feat1, hog_feat2, hog_feat3))
            
            # See what "pixel" we are currently on
            xleft = xpos * pix_per_cell 
            ytop = ypos * pix_per_cell
            
            # Extract the image patch
            subimg = cv2.resize(color_space_to_search[ytop:ytop+window, xleft:xleft+window], (window, window))
            
            # Get color features
            if spatial_feat:
                spatial_features = bin_spatial(subimg, size=spatial_size)
            else:
                spatial_features = np.array([])
            
            if hist_feat:
                hist_features = color_hist(subimg, nbins=hist_bins)
            else:
                hist_features = np.array([])
            
#             print(hog_features.shape)
#             print(spatial_features.shape)
#             print(hist_features.shape)
            # Scale features and make a prediction
            final_features = np.hstack((spatial_features, hist_features, hog_features))
#             print(final_features.shape)
            test_features = X_scaler.transform(final_features.reshape(1, -1))    
            test_prediction = model.predict(test_features)
            
            # Calculate where the bboxes will be in the rescaled image
            xbox_left = np.int(xleft*scale)
            ytop_draw = np.int(ytop*scale)
            win_draw = np.int(window*scale)
            
            if test_prediction == 1:
                # scale up to original image size
                bbox = ((xbox_left, ytop_draw+ystart), (xbox_left+win_draw,ytop_draw+win_draw+ystart))
                bboxes.append(bbox)
                cv2.rectangle(draw_img,bbox[0],bbox[1],(255,0,0),6)   
    return draw_img, bboxes
   

If the scale is < 1 than it means that the image will get scaled up. As the window size that we search in is fixed, this means that the boxes that are fitted on the image will be small. Ideally, we want to use different scales to detect cars, this is because cars are smaller in the distance and bigger when they are near us. By using a different scale to look at the images we are essentially changing the window size (as mentioned above. Due to the way that our classifiers are trained, we need the boxes that capture the car to look similar to the ones we have in our training images (i.e. only contain the car and nothing else).

Test to see if the pipeline for finding cars is working

In [31]:
# Setup the parameters
color_space = 'LAB' # Can be RGB, HSV, LUV, HLS, YUV, YCrCb, LAB
orient = 9  # HOG orientations
pix_per_cell = 8 # HOG pixels per cell
cell_per_block = 2 # HOG cells per block
hog_channel = 'ALL' # Can be 0, 1, 2, or "ALL"
spatial_size = (16, 16) # Spatial binning dimensions
hist_bins = 16    # Number of histogram bins
ystart = 400
ystop = 656
hog_feat = True
spatial_feat = True
hist_feat = True
In [32]:
# Load up the model
svc, X_scaler = load_model(color_space)
scale = np.arange(0.9, 1.9, 0.2)
all_bboxes_LAB = []
out_imgs = []
print(scale)
[ 0.9  1.1  1.3  1.5  1.7]
In [33]:
# Get a random image
LANE_LINES_FOLDER_NAME = "test_images"
lane_line_image_paths = glob.glob(os.path.join(LANE_LINES_FOLDER_NAME, '*.jpg'))
lane_line_images = [mpimg.imread(lane_line_image_path) for lane_line_image_path in lane_line_image_paths]
sample_image = lane_line_images[5]
plt.imshow(sample_image)
Out[33]:
<matplotlib.image.AxesImage at 0x1246ae828>
In [34]:
fig = plt.figure(figsize=(20, 20))
for i, scale_coef in enumerate(scale):
    out_img, bboxes = find_cars(sample_image, ystart, ystop, scale_coef, svc, \
                                X_scaler, orient, pix_per_cell, cell_per_block, \
                                spatial_size, hist_bins, color_space, hog_feat, spatial_feat, hist_feat)
    for bbox in bboxes:
        all_bboxes_LAB.append(bbox)
        
    ax = fig.add_subplot(5,2,i+1)
    ax.set_title(scale[i], fontsize = 30)
    ax.imshow(out_img)

It seems like the LAB color space is doing a good job finding the cars, but maybe the HSV color space will be even better?

In [35]:
# try using HSV color space
color_space = "HSV"
svc, X_scaler = load_model(color_space)
all_bboxes_HSV = []
In [36]:
fig = plt.figure(figsize=(20, 20))
for i, scale_coef in enumerate(scale):
    out_img, bboxes = find_cars(sample_image, ystart, ystop, scale_coef, svc, \
                                X_scaler, orient, pix_per_cell, cell_per_block, \
                                spatial_size, hist_bins, color_space, hog_feat, spatial_feat, hist_feat)
    for bbox in bboxes:
        all_bboxes_HSV.append(bbox)
        
    out_imgs.append(out_img)
    ax = fig.add_subplot(5,2,i+1)
    ax.set_title(scale[i], fontsize = 30)
    ax.imshow(out_img)

We see here that the HSV channel picks up a lot more noise than the LAB channel, so I will be using the LAB channel

Using a heat map to decide how many cars are in our image

In [37]:
from scipy.ndimage.measurements import label
def add_heat(heat, bboxes):
    for bbox in bboxes:
        heat[bbox[0][1] : bbox[1][1], bbox[0][0] : bbox[1][0]] += 1
    return heat

def apply_threshold(heatmap, threshold):
    # Zero out pixels below the threshold
    heatmap[heatmap <= threshold] = 0
    # Return thresholded map
    return heatmap

def draw_labeled_bboxes(img, labels):
    # Iterate through all detected cars
    for car_number in range(1, labels[1]+1):
        
        # Find pixels with each car_number label value
        nonzero = (labels[0] == car_number).nonzero()
        
        # Identify x and y values of those pixels
        nonzeroy = np.array(nonzero[0])
        nonzerox = np.array(nonzero[1])
        
        # Define the largest possible bounding box based on min/max x and y
        bbox = ((np.min(nonzerox), np.min(nonzeroy)), (np.max(nonzerox), np.max(nonzeroy)))
        # Draw the box on the image
        cv2.rectangle(img, bbox[0], bbox[1], (255,0,0), 6)
    # Return the image
    return img

See what the heat map for the LAB color channel looks like

In [38]:
# create heat map
heat = np.zeros_like(sample_image[:,:,0])

# add heat
heat = add_heat(heat, all_bboxes_LAB)

# threshold it to avoid false positives
heat = apply_threshold(heat, 5)

# find the number of cars
labels = label(heat)
draw_img = draw_labeled_bboxes(np.copy(sample_image), labels)
print("labels[0] is: {}".format(labels[0].shape))
print("There are: {} cars".format(labels[1]))
f, (ax1, ax2) = plt.subplots(1, 2, figsize = (30, 15))
ax1.imshow(heat, cmap = 'hot') 
ax2.imshow(draw_img)
labels[0] is: (720, 1280)
There are: 2 cars
Out[38]:
<matplotlib.image.AxesImage at 0x121dd3eb8>

See what the heat map for the HSV color channel looks like

In [39]:
# create heat map
heat = np.zeros_like(sample_image[:,:,0])

# add heat
heat = add_heat(heat, all_bboxes_HSV)

# threshold it to avoid false positives
heat = apply_threshold(heat, 8)

# find the number of cars
labels = label(heat)
draw_img = draw_labeled_bboxes(np.copy(sample_image), labels)
print("There are: {} car(s)".format(labels[1]))
f, (ax1, ax2) = plt.subplots(1, 2, figsize = (30, 15))
ax1.imshow(heat, cmap = 'hot') 
ax2.imshow(draw_img)
There are: 4 car(s)
Out[39]:
<matplotlib.image.AxesImage at 0x1245abfd0>

See what the heat map for the HSV+LAB color channel looks like

In [40]:
# create heat map
heat = np.zeros_like(sample_image[:,:,0])

# add heat
heat = add_heat(heat, all_bboxes_HSV + all_bboxes_LAB)

# threshold it to avoid false positives
heat = apply_threshold(heat, 13)

# find the number of cars
labels = label(heat)
draw_img = draw_labeled_bboxes(np.copy(sample_image), labels)
print("There are: {} car(s)".format(labels[1]))
f, (ax1, ax2) = plt.subplots(1, 2, figsize = (30, 15))
ax1.imshow(heat, cmap = 'hot') 
ax2.imshow(draw_img)
There are: 2 car(s)
Out[40]:
<matplotlib.image.AxesImage at 0x1219d9fd0>

Using non-maximum suppression to see how many cars are in our image

In [41]:
def nms(bboxes, overlapThreshold):
    
    # Check if the bboxes list is empty
    if len(bboxes) == 0:
        return[]
    
    # Turn the list into type float
    if bboxes.dtype.kind == "i":
        bboxes = bboxes.astype(np.float)
        
    # Define the list which will store the picked boxes
    pick = []
    
    # Define the points that define the corners of the bboxes
    x1 = bboxes[:, 0]
    y1 = bboxes[:, 1]
    x2 = bboxes[:, 2]
    y2 = bboxes[:, 3]
    
    # Calculate the area
    area = (x2 - x1 + 1 ) * (y2 - y1 + 1)
#     print(area)
    # Sort the boxes by the bottom-right y-coordinate of the bounding box 
    idxs = np.argsort(y2)
    
    while len(idxs) > 0:
        
        # Get the most bottom right bbox that is left in the array 
        last = len(idxs) - 1
        i = idxs[last]
        pick.append(i)
        
        # find the smallest possible bbox of all the bboxes that are left
        xx1 = np.maximum(x1[i], x1[idxs[:last]])
        yy1 = np.maximum(y1[i], y1[idxs[:last]])
        xx2 = np.minimum(x2[i], x2[idxs[:last]])
        yy2 = np.minimum(y2[i], y2[idxs[:last]])
        
        w = np.maximum(0, xx2 - xx1 + 1)
        h = np.maximum(0, yy2 - yy1 + 1)
        
        # Finding the overlap
        overlap = (w * h) / area[idxs[:last]]
        
        # delete all the indexes from the index list that overlapped, the lower the threshold, the more boxes get
        # filtered out
        idxs = np.delete(idxs, np.concatenate(([last], np.where(overlap > overlapThreshold)[0])))
        
    
    return(bboxes[pick].astype(np.int))

Using the LAB channel

In [42]:
concat_all_bboxes_nms = np.array([[bbox[0][0], bbox[0][1], bbox[1][0], bbox[1][1]] for bbox in all_bboxes_LAB])
# for bbox in concat_all_bboxes:
#     print(bbox[0][0])
bboxes_nms = nms(concat_all_bboxes_nms, 0.5)

all_bboxes_nms = [((x1, y1), (x2,y2)) for x1,y1,x2,y2 in bboxes_nms]
    
out_img = sample_image.copy()
for p1, p2 in all_bboxes_nms:
    cv2.rectangle(out_img, p1, p2, (255, 0, 0), 5)
plt.imshow(out_img)
Out[42]:
<matplotlib.image.AxesImage at 0x121fbd2e8>

Using the HSV channel

In [43]:
concat_all_bboxes_nms = np.array([[bbox[0][0], bbox[0][1], bbox[1][0], bbox[1][1]] for bbox in all_bboxes_HSV])
# for bbox in concat_all_bboxes:
#     print(bbox[0][0])
bboxes_nms = nms(concat_all_bboxes_nms, 0.5)

all_bboxes_nms = [((x1, y1), (x2,y2)) for x1,y1,x2,y2 in bboxes_nms]
    
out_img = sample_image.copy()
for p1, p2 in all_bboxes_nms:
    cv2.rectangle(out_img, p1, p2, (255, 0, 0), 5)
plt.imshow(out_img)
Out[43]:
<matplotlib.image.AxesImage at 0x1246ed358>

Seems like non-maximum suppression doesn't work that well, although this may be because the bboxes that we produced at of only square sizes and not rectangles like the one that exists in facenet

Setup the pipeline for detecting cars

In [44]:
class HeatmapContainer:
    def __init__(self, n = 10):
        self.heatmaps = []
        self.n = n
    def append_to_heatmaps(self, heatmap):
        if len(self.heatmaps) >= self.n:
            # insert to start of the list
            self.heatmaps.insert(0, heatmap)
            
            # then pop the last heatmap off
            self.heatmaps.pop(-1)
        else:
            self.heatmaps.insert(0, heatmap)
            
    def get_stacked_heatmaps(self): 
        if len(self.heatmaps) == 0:
            return([]) 
        stacked_heatmap = np.zeros_like(self.heatmaps[0]) 
        for heatmap in self.heatmaps:
            stacked_heatmap += np.array(heatmap)
        return(stacked_heatmap)
        
    
In [157]:
def process_image_vehicle_detection_stacked_heatmap(image, debug = False, labels = False):
    """
    Note here that the svc is trained on a HSV image, and so the image is 
    changed to HSV color space in find_cars function when the classfications
    are being made
    """
    orient = 9  # HOG orientations
    pix_per_cell = 8 # HOG pixels per cell
    cell_per_block = 2 # HOG cells per block
    spatial_size = (16, 16) # Spatial binning dimensions
    hist_bins = 16    # Number of histogram bins
    y_start_stop = [400, 720] # Min and max in y to search in slide_window()
    ystart = 400 # where to start the search for vehicles
    ystop = 656 # where to stop the searcch for vehicles 
    scale = np.arange(0.9, 1.9, 0.2)
    color_space = "LAB"
    hog_feat = True
    spatial_feat = True
    hist_feat = True
    all_bboxes = []
    
    
    
    # Loading the model with respect to the color space that is being used
    svc, X_scaler = load_model(color_space)
    
    # Get the out_img and also the bboxes
    for i, scale_coef in enumerate(scale):
        out_img, bboxes = find_cars(image, ystart, ystop, scale_coef, svc, X_scaler, \
                                    orient, pix_per_cell, cell_per_block, spatial_size, \
                                    hist_bins, color_space, hog_feat, spatial_feat, hist_feat)
        for bbox in bboxes:
            # append the bboxes for heat map
            all_bboxes.append(bbox)
    
    # empty heat map
    heatmap = np.zeros_like(image[:,:,0])

    # add heat
    heatmap = add_heat(heatmap, all_bboxes)
    heatmapContainer.append_to_heatmaps(heatmap)
    
    stacked_heatmap = heatmapContainer.get_stacked_heatmaps()
    
    # threshold it to avoid false positives
    stacked_heatmap_thresholded = apply_threshold(stacked_heatmap, 12)
    
    # find the number of cars
    labels = label(stacked_heatmap_thresholded)
    
    # return final image
    heat_bboxes_img = draw_labeled_bboxes(np.copy(image), labels)
    
    if debug: 
        return(out_img, heat_bboxes_img, stacked_heatmap_thresholded)
    elif label:
        return(labels)
    else:
        return(heat_bboxes_img)
        

Some helper function to extract some frames from the video

In [158]:
def load_video(video_name):
    return(pickle.load(open("{}_video.p".format(video_name), "rb"))[video_name])

def save_video(video_name, video_frames):
    video_frames = np.array(video_frames)
    video_trim = {video_name : video_frames}
    pickle.dump(video_trim, open("{}_video.p".format(video_name), "wb"))
In [48]:
# NOTE: Uncomment below if the video needs to be saved again
# video_frames = []
# def add_video_frames(image):
#     video_frames.append(image)
#     return(image) 
# project_video_output = 'placeholder.mp4'
# clip1 = VideoFileClip("white_car.mp4")
# project_video_clip = clip1.fl_image(add_video_frames) #NOTE: this function expects color images!!
# %time project_video_clip.write_videofile(project_video_output, audio=False)

# save_video("white_car", video_frames)

white_car = load_video("white_car")
white_car.shape
Out[48]:
(276, 720, 1280, 3)
In [159]:
# See some output from the function
import matplotlib.gridspec as gridspec

heatmapContainer = HeatmapContainer()
fig = plt.figure(figsize = (20, 20))
gs = gridspec.GridSpec(5, 3)
for i, frame in enumerate(white_car[0:5:]):
    bboxes, thresh_bboxes, heatmap = process_image_vehicle_detection_stacked_heatmap(frame, debug = True)
    for j, image in enumerate([bboxes, thresh_bboxes, heatmap]):
        ax = fig.add_subplot(gs[i,j])
        if j != 2:
            ax.imshow(image)
        else:
            ax.imshow(image, cmap = 'hot')
        ax.set_title("Frame: {}".format(i+1))

Running the detection algorithm on the video

In [236]:
from moviepy.editor import VideoFileClip
from IPython.display import HTML
In [161]:
# This cell is for debugging purposes so that I can run my vehicle recognition software on 
# small parts of the video
heatmapContainer = HeatmapContainer()
time = "11-15"
project_video_output = '{}result_final.mp4'.format(time)
clip1 = VideoFileClip("{}.mp4".format(time))
project_video_clip = clip1.fl_image(process_image_lane_and_vehicle_detection) #NOTE: this function expects color images!!
%time project_video_clip.write_videofile(project_video_output, audio=False)
Lanes were detected
[MoviePy] >>>> Building video 11-15result_final.mp4
[MoviePy] Writing video 11-15result_final.mp4
  0%|          | 0/115 [00:00<?, ?it/s]
  1%|          | 1/115 [00:03<06:43,  3.54s/it]
Lanes were detected
  2%|▏         | 2/115 [00:07<06:57,  3.69s/it]
Lanes were detected
  3%|▎         | 3/115 [00:11<06:57,  3.73s/it]
Lanes were detected
  3%|▎         | 4/115 [00:15<06:51,  3.71s/it]
Lanes were detected
  4%|▍         | 5/115 [00:20<07:55,  4.32s/it]
Lanes were detected
  5%|▌         | 6/115 [00:24<07:38,  4.20s/it]
Lanes were detected
  6%|▌         | 7/115 [00:28<07:19,  4.07s/it]
Lanes were detected
  7%|▋         | 8/115 [00:32<07:14,  4.06s/it]
Lanes were detected
  8%|▊         | 9/115 [00:37<07:35,  4.30s/it]
Lanes were detected
  9%|▊         | 10/115 [00:41<07:14,  4.14s/it]
Lanes were detected
 10%|▉         | 11/115 [00:45<07:12,  4.16s/it]
Lanes were detected
 10%|█         | 12/115 [00:49<07:11,  4.19s/it]
Lanes were detected
 11%|█▏        | 13/115 [00:53<06:46,  3.99s/it]
Lanes were detected
 12%|█▏        | 14/115 [00:56<06:25,  3.82s/it]
Lanes were detected
 13%|█▎        | 15/115 [01:00<06:18,  3.79s/it]
Lanes were detected
 14%|█▍        | 16/115 [01:04<06:14,  3.79s/it]
Lanes were detected
 15%|█▍        | 17/115 [01:08<06:30,  3.99s/it]
Lanes were detected
 16%|█▌        | 18/115 [01:15<07:58,  4.93s/it]
Lanes were detected
 17%|█▋        | 19/115 [01:21<08:19,  5.20s/it]
Lanes were detected
 17%|█▋        | 20/115 [01:27<08:33,  5.41s/it]
Lanes were detected
 18%|█▊        | 21/115 [01:32<08:10,  5.21s/it]
Lanes were detected
 19%|█▉        | 22/115 [01:36<07:36,  4.91s/it]
Lanes were detected
 20%|██        | 23/115 [01:40<07:08,  4.65s/it]
Lanes were detected
 21%|██        | 24/115 [01:44<06:55,  4.56s/it]
Lanes were detected
 22%|██▏       | 25/115 [01:48<06:38,  4.43s/it]
Lanes were detected
 23%|██▎       | 26/115 [01:53<06:33,  4.42s/it]
Lanes were detected
 23%|██▎       | 27/115 [01:57<06:22,  4.35s/it]
Lanes were detected
 24%|██▍       | 28/115 [02:01<06:17,  4.34s/it]
Lanes were detected
 25%|██▌       | 29/115 [02:06<06:10,  4.31s/it]
Lanes were detected
 26%|██▌       | 30/115 [02:10<06:00,  4.24s/it]
Lanes were detected
 27%|██▋       | 31/115 [02:14<05:59,  4.27s/it]
Lanes were detected
 28%|██▊       | 32/115 [02:18<05:48,  4.20s/it]
Lanes were detected
 29%|██▊       | 33/115 [02:22<05:39,  4.14s/it]
Lanes were detected
 30%|██▉       | 34/115 [02:26<05:34,  4.13s/it]
Lanes were detected
 30%|███       | 35/115 [02:30<05:30,  4.13s/it]
Lanes were detected
 31%|███▏      | 36/115 [02:34<05:18,  4.04s/it]
Lanes were detected
 32%|███▏      | 37/115 [02:38<05:17,  4.07s/it]
Lanes were detected
 33%|███▎      | 38/115 [02:42<05:12,  4.06s/it]
Lanes were detected
 34%|███▍      | 39/115 [02:46<05:04,  4.01s/it]
Lanes were detected
 35%|███▍      | 40/115 [02:50<05:00,  4.01s/it]
Lanes were detected
 36%|███▌      | 41/115 [02:54<04:56,  4.00s/it]
Lanes were detected
 37%|███▋      | 42/115 [02:58<04:47,  3.95s/it]
Lanes were detected
 37%|███▋      | 43/115 [03:02<04:46,  3.99s/it]
Lanes were detected
 38%|███▊      | 44/115 [03:07<05:05,  4.31s/it]
Lanes were detected
 39%|███▉      | 45/115 [03:12<05:05,  4.37s/it]
Lanes were detected
 40%|████      | 46/115 [03:16<05:11,  4.52s/it]
Lanes were detected
 41%|████      | 47/115 [03:21<05:08,  4.54s/it]
Lanes were detected
 42%|████▏     | 48/115 [03:28<05:56,  5.33s/it]
Lanes were detected
 43%|████▎     | 49/115 [03:34<05:59,  5.45s/it]
Lanes were detected
 43%|████▎     | 50/115 [03:40<06:02,  5.58s/it]
Lanes were detected
 44%|████▍     | 51/115 [03:45<05:40,  5.32s/it]
Lanes were detected
 45%|████▌     | 52/115 [03:49<05:22,  5.11s/it]
Lanes were detected
 46%|████▌     | 53/115 [03:54<05:11,  5.02s/it]
Lanes were detected
 47%|████▋     | 54/115 [03:58<04:53,  4.82s/it]
Lanes were detected
 48%|████▊     | 55/115 [04:02<04:37,  4.62s/it]
Lanes were detected
 49%|████▊     | 56/115 [04:07<04:29,  4.57s/it]
Lanes were detected
 50%|████▉     | 57/115 [04:11<04:16,  4.42s/it]
Lanes were detected
 50%|█████     | 58/115 [04:15<04:06,  4.32s/it]
Lanes were detected
 51%|█████▏    | 59/115 [04:19<04:02,  4.33s/it]
Lanes were detected
 52%|█████▏    | 60/115 [04:23<03:53,  4.24s/it]
Lanes were detected
 53%|█████▎    | 61/115 [04:28<03:50,  4.26s/it]
Lanes were detected
 54%|█████▍    | 62/115 [04:32<03:41,  4.18s/it]
Lanes were detected
 55%|█████▍    | 63/115 [04:36<03:34,  4.12s/it]
Lanes were detected
 56%|█████▌    | 64/115 [04:40<03:32,  4.17s/it]
Lanes were detected
 57%|█████▋    | 65/115 [04:44<03:26,  4.12s/it]
Lanes were detected
 57%|█████▋    | 66/115 [04:48<03:21,  4.11s/it]
Lanes were detected
 58%|█████▊    | 67/115 [04:52<03:15,  4.08s/it]
Lanes were detected
 59%|█████▉    | 68/115 [04:56<03:11,  4.07s/it]
Lanes were detected
 60%|██████    | 69/115 [05:00<03:09,  4.12s/it]
Lanes were detected
 61%|██████    | 70/115 [05:04<03:03,  4.08s/it]
Lanes were detected
 62%|██████▏   | 71/115 [05:08<02:57,  4.04s/it]
Lanes were detected
 63%|██████▎   | 72/115 [05:13<02:57,  4.13s/it]
Lanes were detected
 63%|██████▎   | 73/115 [05:17<02:51,  4.09s/it]
Lanes were detected
 64%|██████▍   | 74/115 [05:21<02:48,  4.11s/it]
Lanes were detected
 65%|██████▌   | 75/115 [05:26<02:54,  4.36s/it]
Lanes were detected
 66%|██████▌   | 76/115 [05:30<02:48,  4.32s/it]
Lanes were detected
 67%|██████▋   | 77/115 [05:34<02:40,  4.23s/it]
Lanes were detected
 68%|██████▊   | 78/115 [05:38<02:34,  4.17s/it]
Lanes were detected
 69%|██████▊   | 79/115 [05:42<02:28,  4.13s/it]
Lanes were detected
 70%|██████▉   | 80/115 [05:48<02:46,  4.76s/it]
Lanes were detected
 70%|███████   | 81/115 [05:54<02:53,  5.11s/it]
Lanes were detected
 71%|███████▏  | 82/115 [05:59<02:48,  5.11s/it]
Lanes were detected
 72%|███████▏  | 83/115 [06:03<02:32,  4.77s/it]
Lanes were detected
 73%|███████▎  | 84/115 [06:07<02:20,  4.54s/it]
Lanes were detected
 74%|███████▍  | 85/115 [06:12<02:13,  4.45s/it]
Lanes were detected
 75%|███████▍  | 86/115 [06:16<02:06,  4.36s/it]
Lanes were detected
 76%|███████▌  | 87/115 [06:20<02:00,  4.29s/it]
Lanes were detected
 77%|███████▋  | 88/115 [06:24<01:54,  4.22s/it]
Lanes were detected
 77%|███████▋  | 89/115 [06:28<01:48,  4.18s/it]
Lanes were detected
 78%|███████▊  | 90/115 [06:32<01:43,  4.14s/it]
Lanes were detected
 79%|███████▉  | 91/115 [06:37<01:46,  4.43s/it]
Lanes were detected
 80%|████████  | 92/115 [06:42<01:45,  4.57s/it]
Lanes were detected
 81%|████████  | 93/115 [06:47<01:40,  4.55s/it]
Lanes were detected
 82%|████████▏ | 94/115 [06:51<01:33,  4.45s/it]
Lanes were detected
 83%|████████▎ | 95/115 [06:55<01:28,  4.41s/it]
Lanes were detected
 83%|████████▎ | 96/115 [06:59<01:21,  4.29s/it]
Lanes were detected
 84%|████████▍ | 97/115 [07:03<01:16,  4.23s/it]
Lanes were detected
 85%|████████▌ | 98/115 [07:07<01:11,  4.19s/it]
Lanes were detected
 86%|████████▌ | 99/115 [07:11<01:05,  4.12s/it]
Lanes were detected
 87%|████████▋ | 100/115 [07:15<01:00,  4.07s/it]
Lanes were detected
 88%|████████▊ | 101/115 [07:19<00:56,  4.07s/it]
Lanes were detected
 89%|████████▊ | 102/115 [07:25<00:59,  4.59s/it]
Lanes were detected
 90%|████████▉ | 103/115 [07:29<00:54,  4.55s/it]
Lanes were detected
 90%|█████████ | 104/115 [07:33<00:47,  4.34s/it]
Lanes were detected
 91%|█████████▏| 105/115 [07:37<00:41,  4.19s/it]
Lanes were detected
 92%|█████████▏| 106/115 [07:41<00:36,  4.08s/it]
Lanes were detected
 93%|█████████▎| 107/115 [07:45<00:31,  3.95s/it]
Lanes were detected
 94%|█████████▍| 108/115 [07:48<00:27,  3.87s/it]
Lanes were detected
 95%|█████████▍| 109/115 [07:52<00:22,  3.80s/it]
Lanes were detected
 96%|█████████▌| 110/115 [07:56<00:18,  3.77s/it]
Lanes were detected
 97%|█████████▋| 111/115 [07:59<00:14,  3.67s/it]
Lanes were detected
 97%|█████████▋| 112/115 [08:03<00:11,  3.69s/it]
Lanes were detected
 98%|█████████▊| 113/115 [08:06<00:07,  3.60s/it]
Lanes were detected
 99%|█████████▉| 114/115 [08:10<00:03,  3.57s/it]
Lanes were detected
100%|██████████| 115/115 [08:13<00:00,  3.58s/it]
Lanes were detected

[MoviePy] Done.
[MoviePy] >>>> Video ready: 11-15result_final.mp4 

CPU times: user 6min 42s, sys: 37.8 s, total: 7min 19s
Wall time: 8min 15s
In [160]:
# Combine the lane detection algorithm and the vehicle detection algorithm
def process_image_lane_and_vehicle_detection(image):
    labels = process_image_vehicle_detection_stacked_heatmap(image, labels = True)
    lane_line_detected_image = process_image_detect_lane_line(image)
    out_image = draw_labeled_bboxes(np.copy(lane_line_detected_image), labels)
    return(out_image) 
In [162]:
heatmapContainer = HeatmapContainer()
project_video_output = 'project_video_result_combined.mp4'
clip1 = VideoFileClip("project_video.mp4")
project_video_clip = clip1.fl_image(process_image_lane_and_vehicle_detection) #NOTE: this function expects color images!!
%time project_video_clip.write_videofile(project_video_output, audio=False)
Lanes were detected
[MoviePy] >>>> Building video project_video_result_combined.mp4
[MoviePy] Writing video project_video_result_combined.mp4
  0%|          | 0/1261 [00:00<?, ?it/s]
  0%|          | 1/1261 [00:04<1:34:34,  4.50s/it]
Lanes were detected
  0%|          | 2/1261 [00:09<1:36:38,  4.61s/it]
Lanes were detected
  0%|          | 3/1261 [00:13<1:35:58,  4.58s/it]
Lanes were detected
  0%|          | 4/1261 [00:17<1:29:51,  4.29s/it]
Lanes were detected
  0%|          | 5/1261 [00:21<1:26:16,  4.12s/it]
Lanes were detected
  0%|          | 6/1261 [00:24<1:22:42,  3.95s/it]
Lanes were detected
  1%|          | 7/1261 [00:28<1:20:50,  3.87s/it]
Lanes were detected
  1%|          | 8/1261 [00:32<1:19:07,  3.79s/it]
Lanes were detected
  1%|          | 9/1261 [00:35<1:19:52,  3.83s/it]
Lanes were detected
  1%|          | 10/1261 [00:39<1:17:38,  3.72s/it]
Lanes were detected
  1%|          | 11/1261 [00:43<1:17:04,  3.70s/it]
Lanes were detected
  1%|          | 12/1261 [00:47<1:19:45,  3.83s/it]
Lanes were detected
  1%|          | 13/1261 [00:50<1:17:39,  3.73s/it]
Lanes were detected
  1%|          | 14/1261 [00:54<1:16:06,  3.66s/it]
Lanes were detected
  1%|          | 15/1261 [00:58<1:17:48,  3.75s/it]
Lanes were detected
  1%|▏         | 16/1261 [01:02<1:22:44,  3.99s/it]
Lanes were detected
  1%|▏         | 17/1261 [01:07<1:25:34,  4.13s/it]
Lanes were detected
  1%|▏         | 18/1261 [01:12<1:31:49,  4.43s/it]
Lanes were detected
  2%|▏         | 19/1261 [01:17<1:36:05,  4.64s/it]
Lanes were detected
  2%|▏         | 20/1261 [01:22<1:38:41,  4.77s/it]
Lanes were detected
  2%|▏         | 21/1261 [01:26<1:36:21,  4.66s/it]
Lanes were detected
  2%|▏         | 22/1261 [01:32<1:41:32,  4.92s/it]
Lanes were detected
  2%|▏         | 23/1261 [01:36<1:37:39,  4.73s/it]
Lanes were detected
  2%|▏         | 24/1261 [01:41<1:36:18,  4.67s/it]
Lanes were detected
  2%|▏         | 25/1261 [01:45<1:32:11,  4.48s/it]
Lanes were detected
  2%|▏         | 26/1261 [01:49<1:27:55,  4.27s/it]
Lanes were detected
  2%|▏         | 27/1261 [01:52<1:23:55,  4.08s/it]
Lanes were detected
  2%|▏         | 28/1261 [01:56<1:20:59,  3.94s/it]
Lanes were detected
  2%|▏         | 29/1261 [01:59<1:18:26,  3.82s/it]
Lanes were detected
  2%|▏         | 30/1261 [02:03<1:19:14,  3.86s/it]
Lanes were detected
  2%|▏         | 31/1261 [02:07<1:18:06,  3.81s/it]
Lanes were detected
  3%|▎         | 32/1261 [02:11<1:17:10,  3.77s/it]
Lanes were detected
  3%|▎         | 33/1261 [02:14<1:15:39,  3.70s/it]
Lanes were detected
  3%|▎         | 34/1261 [02:18<1:14:45,  3.66s/it]
Lanes were detected
  3%|▎         | 35/1261 [02:22<1:15:20,  3.69s/it]
Lanes were detected
  3%|▎         | 36/1261 [02:25<1:16:32,  3.75s/it]
Lanes were detected
  3%|▎         | 37/1261 [02:29<1:17:04,  3.78s/it]
Lanes were detected
  3%|▎         | 38/1261 [02:34<1:19:59,  3.92s/it]
Lanes were detected
  3%|▎         | 39/1261 [02:37<1:18:53,  3.87s/it]
Lanes were detected
  3%|▎         | 40/1261 [02:41<1:17:19,  3.80s/it]
Lanes were detected
  3%|▎         | 41/1261 [02:44<1:15:14,  3.70s/it]
Lanes were detected
  3%|▎         | 42/1261 [02:48<1:16:36,  3.77s/it]
Lanes were detected
  3%|▎         | 43/1261 [02:53<1:20:48,  3.98s/it]
Lanes were detected
  3%|▎         | 44/1261 [02:57<1:23:53,  4.14s/it]
Lanes were detected
  4%|▎         | 45/1261 [03:03<1:30:20,  4.46s/it]
Lanes were detected
  4%|▎         | 46/1261 [03:07<1:30:12,  4.45s/it]
Lanes were detected
  4%|▎         | 47/1261 [03:11<1:27:17,  4.31s/it]
Lanes were detected
  4%|▍         | 48/1261 [03:15<1:26:40,  4.29s/it]
Lanes were detected
  4%|▍         | 49/1261 [03:20<1:29:50,  4.45s/it]
Lanes were detected
  4%|▍         | 50/1261 [03:25<1:33:38,  4.64s/it]
Lanes were detected
  4%|▍         | 51/1261 [03:31<1:43:49,  5.15s/it]
Lanes were detected
  4%|▍         | 52/1261 [03:37<1:45:59,  5.26s/it]
Lanes were detected
  4%|▍         | 53/1261 [03:41<1:38:32,  4.89s/it]
Lanes were detected
  4%|▍         | 54/1261 [03:45<1:30:44,  4.51s/it]
Lanes were detected
  4%|▍         | 55/1261 [03:48<1:25:41,  4.26s/it]
Lanes were detected
  4%|▍         | 56/1261 [03:52<1:22:30,  4.11s/it]
Lanes were detected
  5%|▍         | 57/1261 [03:56<1:22:19,  4.10s/it]
Lanes were detected
  5%|▍         | 58/1261 [04:00<1:19:25,  3.96s/it]
Lanes were detected
  5%|▍         | 59/1261 [04:04<1:18:25,  3.91s/it]
Lanes were detected
  5%|▍         | 60/1261 [04:08<1:22:14,  4.11s/it]
Lanes were detected
  5%|▍         | 61/1261 [04:12<1:23:23,  4.17s/it]
Lanes were detected
  5%|▍         | 62/1261 [04:18<1:30:30,  4.53s/it]
Lanes were detected
  5%|▍         | 63/1261 [04:24<1:43:26,  5.18s/it]
Lanes were detected
  5%|▌         | 64/1261 [04:31<1:53:18,  5.68s/it]
Lanes were detected
  5%|▌         | 65/1261 [04:35<1:42:41,  5.15s/it]
Lanes were detected
  5%|▌         | 66/1261 [04:39<1:35:56,  4.82s/it]
Lanes were detected
  5%|▌         | 67/1261 [04:44<1:34:45,  4.76s/it]
Lanes were detected
  5%|▌         | 68/1261 [04:52<1:56:15,  5.85s/it]
Lanes were detected
  5%|▌         | 69/1261 [04:58<1:53:52,  5.73s/it]
Lanes were detected
  6%|▌         | 70/1261 [05:04<1:54:19,  5.76s/it]
Lanes were detected
  6%|▌         | 71/1261 [05:11<2:01:16,  6.11s/it]
Lanes were detected
  6%|▌         | 72/1261 [05:22<2:30:05,  7.57s/it]
Lanes were detected
  6%|▌         | 73/1261 [05:33<2:51:36,  8.67s/it]
Lanes were detected
  6%|▌         | 74/1261 [05:39<2:38:28,  8.01s/it]
Lanes were detected
  6%|▌         | 75/1261 [05:45<2:22:17,  7.20s/it]
Lanes were detected
  6%|▌         | 76/1261 [05:50<2:10:33,  6.61s/it]
Lanes were detected
  6%|▌         | 77/1261 [05:54<1:57:00,  5.93s/it]
Lanes were detected
  6%|▌         | 78/1261 [05:59<1:48:04,  5.48s/it]
Lanes were detected
  6%|▋         | 79/1261 [06:02<1:38:36,  5.01s/it]
Lanes were detected
  6%|▋         | 80/1261 [06:07<1:37:52,  4.97s/it]
Lanes were detected
  6%|▋         | 81/1261 [06:13<1:39:02,  5.04s/it]
Lanes were detected
  7%|▋         | 82/1261 [06:17<1:33:58,  4.78s/it]
Lanes were detected
  7%|▋         | 83/1261 [06:21<1:29:32,  4.56s/it]
Lanes were detected
  7%|▋         | 84/1261 [06:24<1:24:42,  4.32s/it]
Lanes were detected
  7%|▋         | 85/1261 [06:28<1:21:41,  4.17s/it]
Lanes were detected
  7%|▋         | 86/1261 [06:32<1:18:45,  4.02s/it]
Lanes were detected
  7%|▋         | 87/1261 [06:35<1:15:10,  3.84s/it]
Lanes were detected
  7%|▋         | 88/1261 [06:39<1:11:50,  3.67s/it]
Lanes were detected
  7%|▋         | 89/1261 [06:42<1:10:46,  3.62s/it]
Lanes were detected
  7%|▋         | 90/1261 [06:46<1:12:30,  3.72s/it]
Lanes were detected
  7%|▋         | 91/1261 [06:50<1:11:16,  3.65s/it]
Lanes were detected
  7%|▋         | 92/1261 [06:53<1:10:42,  3.63s/it]
Lanes were detected
  7%|▋         | 93/1261 [06:58<1:14:42,  3.84s/it]
Lanes were detected
  7%|▋         | 94/1261 [07:03<1:23:17,  4.28s/it]
Lanes were detected
  8%|▊         | 95/1261 [07:08<1:25:20,  4.39s/it]
Lanes were detected
  8%|▊         | 96/1261 [07:13<1:29:10,  4.59s/it]
Lanes were detected
  8%|▊         | 97/1261 [07:17<1:26:29,  4.46s/it]
Lanes were detected
  8%|▊         | 98/1261 [07:21<1:24:06,  4.34s/it]
Lanes were detected
  8%|▊         | 99/1261 [07:25<1:21:19,  4.20s/it]
Lanes were detected
  8%|▊         | 100/1261 [07:29<1:23:34,  4.32s/it]
Lanes were detected
  8%|▊         | 101/1261 [07:34<1:24:35,  4.38s/it]
Lanes were detected
  8%|▊         | 102/1261 [07:38<1:24:52,  4.39s/it]
Lanes were detected
  8%|▊         | 103/1261 [07:42<1:20:05,  4.15s/it]
Lanes were detected
  8%|▊         | 104/1261 [07:45<1:16:29,  3.97s/it]
Lanes were detected
  8%|▊         | 105/1261 [07:49<1:14:17,  3.86s/it]
Lanes were detected
  8%|▊         | 106/1261 [07:54<1:19:22,  4.12s/it]
Lanes were detected
  8%|▊         | 107/1261 [07:58<1:18:39,  4.09s/it]
Lanes were detected
  9%|▊         | 108/1261 [08:03<1:26:31,  4.50s/it]
Lanes were detected
  9%|▊         | 109/1261 [08:07<1:25:29,  4.45s/it]
Lanes were detected
  9%|▊         | 110/1261 [08:12<1:27:20,  4.55s/it]
Lanes were detected
  9%|▉         | 111/1261 [08:16<1:21:55,  4.27s/it]
Lanes were detected
  9%|▉         | 112/1261 [08:21<1:25:37,  4.47s/it]
Lanes were detected
  9%|▉         | 113/1261 [08:26<1:30:46,  4.74s/it]
Lanes were detected
  9%|▉         | 114/1261 [08:31<1:32:48,  4.86s/it]
Lanes were detected
  9%|▉         | 115/1261 [08:36<1:29:04,  4.66s/it]
Lanes were detected
  9%|▉         | 116/1261 [08:39<1:23:47,  4.39s/it]
Lanes were detected
  9%|▉         | 117/1261 [08:43<1:19:53,  4.19s/it]
Lanes were detected
  9%|▉         | 118/1261 [08:46<1:15:53,  3.98s/it]
Lanes were detected
  9%|▉         | 119/1261 [08:50<1:14:19,  3.91s/it]
Lanes were detected
 10%|▉         | 120/1261 [08:54<1:13:38,  3.87s/it]
Lanes were detected
 10%|▉         | 121/1261 [08:59<1:21:04,  4.27s/it]
Lanes were detected
 10%|▉         | 122/1261 [09:03<1:18:08,  4.12s/it]
Lanes were detected
 10%|▉         | 123/1261 [09:07<1:16:37,  4.04s/it]
Lanes were detected
 10%|▉         | 124/1261 [09:11<1:16:14,  4.02s/it]
Lanes were detected
 10%|▉         | 125/1261 [09:15<1:17:24,  4.09s/it]
Lanes were detected
 10%|▉         | 126/1261 [09:20<1:20:58,  4.28s/it]
Lanes were detected
 10%|█         | 127/1261 [09:25<1:25:21,  4.52s/it]
Lanes were detected
 10%|█         | 128/1261 [09:30<1:27:45,  4.65s/it]
Lanes were detected
 10%|█         | 129/1261 [09:34<1:26:38,  4.59s/it]
Lanes were detected
 10%|█         | 130/1261 [09:38<1:21:27,  4.32s/it]
Lanes were detected
 10%|█         | 131/1261 [09:42<1:18:23,  4.16s/it]
Lanes were detected
 10%|█         | 132/1261 [09:45<1:13:33,  3.91s/it]
Lanes were detected
 11%|█         | 133/1261 [09:49<1:11:37,  3.81s/it]
Lanes were detected
 11%|█         | 134/1261 [09:52<1:08:59,  3.67s/it]
Lanes were detected
 11%|█         | 135/1261 [09:55<1:06:50,  3.56s/it]
Lanes were detected
 11%|█         | 136/1261 [09:59<1:06:06,  3.53s/it]
Lanes were detected
 11%|█         | 137/1261 [10:02<1:05:46,  3.51s/it]
Lanes were detected
 11%|█         | 138/1261 [10:06<1:04:33,  3.45s/it]
Lanes were detected
 11%|█         | 139/1261 [10:09<1:03:55,  3.42s/it]
Lanes were detected
 11%|█         | 140/1261 [10:12<1:04:23,  3.45s/it]
Lanes were detected
 11%|█         | 141/1261 [10:16<1:05:57,  3.53s/it]
Lanes were detected
 11%|█▏        | 142/1261 [10:20<1:06:12,  3.55s/it]
Lanes were detected
 11%|█▏        | 143/1261 [10:23<1:06:50,  3.59s/it]
Lanes were detected
 11%|█▏        | 144/1261 [10:27<1:05:57,  3.54s/it]
Lanes were detected
 11%|█▏        | 145/1261 [10:31<1:07:06,  3.61s/it]
Lanes were detected
 12%|█▏        | 146/1261 [10:34<1:07:04,  3.61s/it]
Lanes were detected
 12%|█▏        | 147/1261 [10:38<1:08:04,  3.67s/it]
Lanes were detected
 12%|█▏        | 148/1261 [10:42<1:12:01,  3.88s/it]
Lanes were detected
 12%|█▏        | 149/1261 [10:46<1:10:56,  3.83s/it]
Lanes were detected
 12%|█▏        | 150/1261 [10:50<1:09:58,  3.78s/it]
Lanes were detected
 12%|█▏        | 151/1261 [10:54<1:10:46,  3.83s/it]
Lanes were detected
 12%|█▏        | 152/1261 [10:58<1:11:49,  3.89s/it]
Lanes were detected
 12%|█▏        | 153/1261 [11:02<1:14:32,  4.04s/it]
Lanes were detected
 12%|█▏        | 154/1261 [11:06<1:15:27,  4.09s/it]
Lanes were detected
 12%|█▏        | 155/1261 [11:10<1:15:01,  4.07s/it]
Lanes were detected
 12%|█▏        | 156/1261 [11:14<1:12:16,  3.92s/it]
Lanes were detected
 12%|█▏        | 157/1261 [11:18<1:11:05,  3.86s/it]
Lanes were detected
 13%|█▎        | 158/1261 [11:22<1:12:48,  3.96s/it]
Lanes were detected
 13%|█▎        | 159/1261 [11:26<1:13:27,  4.00s/it]
Lanes were detected
 13%|█▎        | 160/1261 [11:31<1:21:02,  4.42s/it]
Lanes were detected
 13%|█▎        | 161/1261 [11:35<1:19:29,  4.34s/it]
Lanes were detected
 13%|█▎        | 162/1261 [11:39<1:17:13,  4.22s/it]
Lanes were detected
 13%|█▎        | 163/1261 [11:43<1:15:58,  4.15s/it]
Lanes were detected
 13%|█▎        | 164/1261 [11:47<1:14:50,  4.09s/it]
Lanes were detected
 13%|█▎        | 165/1261 [11:51<1:14:59,  4.11s/it]
Lanes were detected
 13%|█▎        | 166/1261 [11:55<1:12:39,  3.98s/it]
Lanes were detected
 13%|█▎        | 167/1261 [11:59<1:12:52,  4.00s/it]
Lanes were detected
 13%|█▎        | 168/1261 [12:03<1:11:50,  3.94s/it]
Lanes were detected
 13%|█▎        | 169/1261 [12:07<1:10:45,  3.89s/it]
Lanes were detected
 13%|█▎        | 170/1261 [12:10<1:09:06,  3.80s/it]
Lanes were detected
 14%|█▎        | 171/1261 [12:14<1:10:20,  3.87s/it]
Lanes were detected
 14%|█▎        | 172/1261 [12:18<1:09:29,  3.83s/it]
Lanes were detected
 14%|█▎        | 173/1261 [12:22<1:08:46,  3.79s/it]
Lanes were detected
 14%|█▍        | 174/1261 [12:26<1:10:19,  3.88s/it]
Lanes were detected
 14%|█▍        | 175/1261 [12:30<1:09:19,  3.83s/it]
Lanes were detected
 14%|█▍        | 176/1261 [12:34<1:09:36,  3.85s/it]
Lanes were detected
 14%|█▍        | 177/1261 [12:37<1:08:22,  3.78s/it]
Lanes were detected
 14%|█▍        | 178/1261 [12:41<1:08:21,  3.79s/it]
Lanes were detected
 14%|█▍        | 179/1261 [12:44<1:06:43,  3.70s/it]
Lanes were detected
 14%|█▍        | 180/1261 [12:48<1:05:26,  3.63s/it]
Lanes were detected
 14%|█▍        | 181/1261 [12:51<1:04:30,  3.58s/it]
Lanes were detected
 14%|█▍        | 182/1261 [12:56<1:09:11,  3.85s/it]
Lanes were detected
 15%|█▍        | 183/1261 [13:00<1:08:04,  3.79s/it]
Lanes were detected
 15%|█▍        | 184/1261 [13:05<1:16:05,  4.24s/it]
Lanes were detected
 15%|█▍        | 185/1261 [13:09<1:17:21,  4.31s/it]
Lanes were detected
 15%|█▍        | 186/1261 [13:13<1:15:27,  4.21s/it]
Lanes were detected
 15%|█▍        | 187/1261 [13:17<1:12:10,  4.03s/it]
Lanes were detected
 15%|█▍        | 188/1261 [13:21<1:12:48,  4.07s/it]
Lanes were detected
 15%|█▍        | 189/1261 [13:25<1:11:04,  3.98s/it]
Lanes were detected
 15%|█▌        | 190/1261 [13:29<1:13:27,  4.11s/it]
Lanes were detected
 15%|█▌        | 191/1261 [13:33<1:12:29,  4.07s/it]
Lanes were detected
 15%|█▌        | 192/1261 [13:37<1:09:09,  3.88s/it]
Lanes were detected
 15%|█▌        | 193/1261 [13:40<1:08:48,  3.87s/it]
Lanes were detected
 15%|█▌        | 194/1261 [13:44<1:08:19,  3.84s/it]
Lanes were detected
 15%|█▌        | 195/1261 [13:48<1:07:43,  3.81s/it]
Lanes were detected
 16%|█▌        | 196/1261 [13:52<1:06:45,  3.76s/it]
Lanes were detected
 16%|█▌        | 197/1261 [13:56<1:08:25,  3.86s/it]
Lanes were detected
 16%|█▌        | 198/1261 [14:00<1:08:38,  3.87s/it]
Lanes were detected
 16%|█▌        | 199/1261 [14:03<1:08:00,  3.84s/it]
Lanes were detected
 16%|█▌        | 200/1261 [14:08<1:09:14,  3.92s/it]
Lanes were detected
 16%|█▌        | 201/1261 [14:12<1:12:59,  4.13s/it]
Lanes were detected
 16%|█▌        | 202/1261 [14:16<1:11:44,  4.06s/it]
Lanes were detected
 16%|█▌        | 203/1261 [14:20<1:09:02,  3.91s/it]
Lanes were detected
 16%|█▌        | 204/1261 [14:24<1:09:11,  3.93s/it]
Lanes were detected
 16%|█▋        | 205/1261 [14:27<1:08:15,  3.88s/it]
Lanes were detected
 16%|█▋        | 206/1261 [14:31<1:08:11,  3.88s/it]
Lanes were detected
 16%|█▋        | 207/1261 [14:35<1:06:48,  3.80s/it]
Lanes were detected
 16%|█▋        | 208/1261 [14:39<1:06:05,  3.77s/it]
Lanes were detected
 17%|█▋        | 209/1261 [14:42<1:05:05,  3.71s/it]
Lanes were detected
 17%|█▋        | 210/1261 [14:46<1:05:41,  3.75s/it]
Lanes were detected
 17%|█▋        | 211/1261 [14:50<1:05:00,  3.71s/it]
Lanes were detected
 17%|█▋        | 212/1261 [14:53<1:04:54,  3.71s/it]
Lanes were detected
 17%|█▋        | 213/1261 [14:57<1:04:26,  3.69s/it]
Lanes were detected
 17%|█▋        | 214/1261 [15:01<1:06:13,  3.79s/it]
Lanes were detected
 17%|█▋        | 215/1261 [15:04<1:04:42,  3.71s/it]
Lanes were detected
 17%|█▋        | 216/1261 [15:08<1:02:46,  3.60s/it]
Lanes were detected
 17%|█▋        | 217/1261 [15:11<1:01:35,  3.54s/it]
Lanes were detected
 17%|█▋        | 218/1261 [15:15<1:00:35,  3.49s/it]
Lanes were detected
 17%|█▋        | 219/1261 [15:18<1:01:23,  3.53s/it]
Lanes were detected
 17%|█▋        | 220/1261 [15:22<1:00:16,  3.47s/it]
Lanes were detected
 18%|█▊        | 221/1261 [15:25<59:27,  3.43s/it]  
Lanes were detected
 18%|█▊        | 222/1261 [15:29<1:00:41,  3.50s/it]
Lanes were detected
 18%|█▊        | 223/1261 [15:33<1:03:11,  3.65s/it]
Lanes were detected
 18%|█▊        | 224/1261 [15:36<1:04:26,  3.73s/it]
Lanes were detected
 18%|█▊        | 225/1261 [15:40<1:04:58,  3.76s/it]
Lanes were detected
 18%|█▊        | 226/1261 [15:44<1:06:33,  3.86s/it]
Lanes were detected
 18%|█▊        | 227/1261 [15:48<1:06:11,  3.84s/it]
Lanes were detected
 18%|█▊        | 228/1261 [15:52<1:05:23,  3.80s/it]
Lanes were detected
 18%|█▊        | 229/1261 [15:56<1:04:36,  3.76s/it]
Lanes were detected
 18%|█▊        | 230/1261 [15:59<1:04:26,  3.75s/it]
Lanes were detected
 18%|█▊        | 231/1261 [16:03<1:04:33,  3.76s/it]
Lanes were detected
 18%|█▊        | 232/1261 [16:07<1:05:26,  3.82s/it]
Lanes were detected
 18%|█▊        | 233/1261 [16:11<1:05:40,  3.83s/it]
Lanes were detected
 19%|█▊        | 234/1261 [16:15<1:05:34,  3.83s/it]
Lanes were detected
 19%|█▊        | 235/1261 [16:18<1:04:17,  3.76s/it]
Lanes were detected
 19%|█▊        | 236/1261 [16:22<1:04:37,  3.78s/it]
Lanes were detected
 19%|█▉        | 237/1261 [16:26<1:03:54,  3.74s/it]
Lanes were detected
 19%|█▉        | 238/1261 [16:29<1:03:32,  3.73s/it]
Lanes were detected
 19%|█▉        | 239/1261 [16:34<1:05:00,  3.82s/it]
Lanes were detected
 19%|█▉        | 240/1261 [16:38<1:07:17,  3.95s/it]
Lanes were detected
 19%|█▉        | 241/1261 [16:41<1:05:11,  3.84s/it]
Lanes were detected
 19%|█▉        | 242/1261 [16:45<1:03:14,  3.72s/it]
Lanes were detected
 19%|█▉        | 243/1261 [16:48<1:01:40,  3.63s/it]
Lanes were detected
 19%|█▉        | 244/1261 [16:52<1:00:54,  3.59s/it]
Lanes were detected
 19%|█▉        | 245/1261 [16:56<1:02:09,  3.67s/it]
Lanes were detected
 20%|█▉        | 246/1261 [16:59<1:02:41,  3.71s/it]
Lanes were detected
 20%|█▉        | 247/1261 [17:04<1:08:12,  4.04s/it]
Lanes were detected
 20%|█▉        | 248/1261 [17:08<1:07:23,  3.99s/it]
Lanes were detected
 20%|█▉        | 249/1261 [17:12<1:05:59,  3.91s/it]
Lanes were detected
 20%|█▉        | 250/1261 [17:15<1:03:45,  3.78s/it]
Lanes were detected
 20%|█▉        | 251/1261 [17:19<1:04:13,  3.81s/it]
Lanes were detected
 20%|█▉        | 252/1261 [17:23<1:05:40,  3.90s/it]
Lanes were detected
 20%|██        | 253/1261 [17:27<1:04:19,  3.83s/it]
Lanes were detected
 20%|██        | 254/1261 [17:32<1:10:05,  4.18s/it]
Lanes were detected
 20%|██        | 255/1261 [17:36<1:08:43,  4.10s/it]
Lanes were detected
 20%|██        | 256/1261 [17:40<1:07:13,  4.01s/it]
Lanes were detected
 20%|██        | 257/1261 [17:43<1:05:30,  3.91s/it]
Lanes were detected
 20%|██        | 258/1261 [17:47<1:04:10,  3.84s/it]
Lanes were detected
 21%|██        | 259/1261 [17:51<1:04:43,  3.88s/it]
Lanes were detected
 21%|██        | 260/1261 [17:55<1:04:35,  3.87s/it]
Lanes were detected
 21%|██        | 261/1261 [17:59<1:03:45,  3.83s/it]
Lanes were detected
 21%|██        | 262/1261 [18:02<1:01:38,  3.70s/it]
Lanes were detected
 21%|██        | 263/1261 [18:05<1:00:26,  3.63s/it]
Lanes were detected
 21%|██        | 264/1261 [18:09<59:27,  3.58s/it]  
Lanes were detected
 21%|██        | 265/1261 [18:13<59:42,  3.60s/it]
Lanes were detected
 21%|██        | 266/1261 [18:16<59:34,  3.59s/it]
Lanes were detected
 21%|██        | 267/1261 [18:20<1:00:36,  3.66s/it]
Lanes were detected
 21%|██▏       | 268/1261 [18:24<1:00:42,  3.67s/it]
Lanes were detected
 21%|██▏       | 269/1261 [18:27<59:49,  3.62s/it]  
Lanes were detected
 21%|██▏       | 270/1261 [18:31<1:00:15,  3.65s/it]
Lanes were detected
 21%|██▏       | 271/1261 [18:34<58:45,  3.56s/it]  
Lanes were detected
 22%|██▏       | 272/1261 [18:38<58:16,  3.53s/it]
Lanes were detected
 22%|██▏       | 273/1261 [18:41<57:58,  3.52s/it]
Lanes were detected
 22%|██▏       | 274/1261 [18:45<59:02,  3.59s/it]
Lanes were detected
 22%|██▏       | 275/1261 [18:48<58:40,  3.57s/it]
Lanes were detected
 22%|██▏       | 276/1261 [18:52<1:00:18,  3.67s/it]
Lanes were detected
 22%|██▏       | 277/1261 [18:57<1:04:33,  3.94s/it]
Lanes were detected
 22%|██▏       | 278/1261 [19:01<1:03:15,  3.86s/it]
Lanes were detected
 22%|██▏       | 279/1261 [19:04<1:02:03,  3.79s/it]
Lanes were detected
 22%|██▏       | 280/1261 [19:08<1:00:51,  3.72s/it]
Lanes were detected
 22%|██▏       | 281/1261 [19:11<1:00:33,  3.71s/it]
Lanes were detected
 22%|██▏       | 282/1261 [19:15<59:08,  3.62s/it]  
Lanes were detected
 22%|██▏       | 283/1261 [19:18<57:45,  3.54s/it]
Lanes were detected
 23%|██▎       | 284/1261 [19:22<56:45,  3.49s/it]
Lanes were detected
 23%|██▎       | 285/1261 [19:25<57:09,  3.51s/it]
Lanes were detected
 23%|██▎       | 286/1261 [19:29<1:00:00,  3.69s/it]
Lanes were detected
 23%|██▎       | 287/1261 [19:33<1:01:30,  3.79s/it]
Lanes were detected
 23%|██▎       | 288/1261 [19:37<1:00:16,  3.72s/it]
Lanes were detected
 23%|██▎       | 289/1261 [19:40<59:34,  3.68s/it]  
Lanes were detected
 23%|██▎       | 290/1261 [19:44<58:20,  3.61s/it]
Lanes were detected
 23%|██▎       | 291/1261 [19:48<59:02,  3.65s/it]
Lanes were detected
 23%|██▎       | 292/1261 [19:51<59:38,  3.69s/it]
Lanes were detected
 23%|██▎       | 293/1261 [19:55<59:08,  3.67s/it]
Lanes were detected
 23%|██▎       | 294/1261 [19:58<57:16,  3.55s/it]
Lanes were detected
 23%|██▎       | 295/1261 [20:02<55:45,  3.46s/it]
Lanes were detected
 23%|██▎       | 296/1261 [20:05<54:59,  3.42s/it]
Lanes were detected
 24%|██▎       | 297/1261 [20:09<56:01,  3.49s/it]
Lanes were detected
 24%|██▎       | 298/1261 [20:12<56:46,  3.54s/it]
Lanes were detected
 24%|██▎       | 299/1261 [20:16<57:36,  3.59s/it]
Lanes were detected
 24%|██▍       | 300/1261 [20:20<58:17,  3.64s/it]
Lanes were detected
 24%|██▍       | 301/1261 [20:23<57:37,  3.60s/it]
Lanes were detected
 24%|██▍       | 302/1261 [20:27<56:46,  3.55s/it]
Lanes were detected
 24%|██▍       | 303/1261 [20:30<57:06,  3.58s/it]
Lanes were detected
 24%|██▍       | 304/1261 [20:34<56:12,  3.52s/it]
Lanes were detected
 24%|██▍       | 305/1261 [20:37<57:16,  3.59s/it]
Lanes were detected
 24%|██▍       | 306/1261 [20:41<59:40,  3.75s/it]
Lanes were detected
 24%|██▍       | 307/1261 [20:45<58:50,  3.70s/it]
Lanes were detected
 24%|██▍       | 308/1261 [20:49<58:25,  3.68s/it]
Lanes were detected
 25%|██▍       | 309/1261 [20:53<59:49,  3.77s/it]
Lanes were detected
 25%|██▍       | 310/1261 [20:57<1:00:39,  3.83s/it]
Lanes were detected
 25%|██▍       | 311/1261 [21:01<1:03:47,  4.03s/it]
Lanes were detected
 25%|██▍       | 312/1261 [21:06<1:09:52,  4.42s/it]
Lanes were detected
 25%|██▍       | 313/1261 [21:11<1:12:23,  4.58s/it]
Lanes were detected
 25%|██▍       | 314/1261 [21:17<1:19:19,  5.03s/it]
Lanes were detected
 25%|██▍       | 315/1261 [21:21<1:13:21,  4.65s/it]
Lanes were detected
 25%|██▌       | 316/1261 [21:26<1:13:39,  4.68s/it]
Lanes were detected
 25%|██▌       | 317/1261 [21:31<1:14:52,  4.76s/it]
Lanes were detected
 25%|██▌       | 318/1261 [21:35<1:13:36,  4.68s/it]
Lanes were detected
 25%|██▌       | 319/1261 [21:40<1:14:29,  4.74s/it]
Lanes were detected
 25%|██▌       | 320/1261 [21:45<1:13:24,  4.68s/it]
Lanes were detected
 25%|██▌       | 321/1261 [21:48<1:08:14,  4.36s/it]
Lanes were detected
 26%|██▌       | 322/1261 [21:52<1:04:40,  4.13s/it]
Lanes were detected
 26%|██▌       | 323/1261 [21:56<1:01:40,  3.94s/it]
Lanes were detected
 26%|██▌       | 324/1261 [21:59<59:31,  3.81s/it]  
Lanes were detected
 26%|██▌       | 325/1261 [22:03<58:14,  3.73s/it]
Lanes were detected
 26%|██▌       | 326/1261 [22:06<57:41,  3.70s/it]
Lanes were detected
 26%|██▌       | 327/1261 [22:10<56:49,  3.65s/it]
Lanes were detected
 26%|██▌       | 328/1261 [22:14<59:25,  3.82s/it]
Lanes were detected
 26%|██▌       | 329/1261 [22:18<57:53,  3.73s/it]
Lanes were detected
 26%|██▌       | 330/1261 [22:21<56:16,  3.63s/it]
Lanes were detected
 26%|██▌       | 331/1261 [22:24<55:09,  3.56s/it]
Lanes were detected
 26%|██▋       | 332/1261 [22:28<54:25,  3.52s/it]
Lanes were detected
 26%|██▋       | 333/1261 [22:32<56:19,  3.64s/it]
Lanes were detected
 26%|██▋       | 334/1261 [22:35<56:05,  3.63s/it]
Lanes were detected
 27%|██▋       | 335/1261 [22:39<56:27,  3.66s/it]
Lanes were detected
 27%|██▋       | 336/1261 [22:43<56:23,  3.66s/it]
Lanes were detected
 27%|██▋       | 337/1261 [22:47<57:08,  3.71s/it]
Lanes were detected
 27%|██▋       | 338/1261 [22:50<56:22,  3.67s/it]
Lanes were detected
 27%|██▋       | 339/1261 [22:54<55:28,  3.61s/it]
Lanes were detected
 27%|██▋       | 340/1261 [22:58<58:52,  3.84s/it]
Lanes were detected
 27%|██▋       | 341/1261 [23:02<58:57,  3.84s/it]
Lanes were detected
 27%|██▋       | 342/1261 [23:06<1:00:35,  3.96s/it]
Lanes were detected
 27%|██▋       | 343/1261 [23:10<58:45,  3.84s/it]  
Lanes were detected
 27%|██▋       | 344/1261 [23:14<59:14,  3.88s/it]
Lanes were detected
 27%|██▋       | 345/1261 [23:19<1:06:32,  4.36s/it]
Lanes were detected
 27%|██▋       | 346/1261 [23:24<1:07:49,  4.45s/it]
Lanes were detected
 28%|██▊       | 347/1261 [23:28<1:08:12,  4.48s/it]
Lanes were detected
 28%|██▊       | 348/1261 [23:33<1:09:39,  4.58s/it]
Lanes were detected
 28%|██▊       | 349/1261 [23:37<1:07:38,  4.45s/it]
Lanes were detected
 28%|██▊       | 350/1261 [23:41<1:04:38,  4.26s/it]
Lanes were detected
 28%|██▊       | 351/1261 [23:45<1:02:33,  4.12s/it]
Lanes were detected
 28%|██▊       | 352/1261 [23:49<1:02:29,  4.12s/it]
Lanes were detected
 28%|██▊       | 353/1261 [23:53<1:04:26,  4.26s/it]
Lanes were detected
 28%|██▊       | 354/1261 [23:58<1:06:47,  4.42s/it]
Lanes were detected
 28%|██▊       | 355/1261 [24:02<1:05:34,  4.34s/it]
Lanes were detected
 28%|██▊       | 356/1261 [24:06<1:01:38,  4.09s/it]
Lanes were detected
 28%|██▊       | 357/1261 [24:09<58:02,  3.85s/it]  
Lanes were detected
 28%|██▊       | 358/1261 [24:14<1:03:58,  4.25s/it]
Lanes were detected
 28%|██▊       | 359/1261 [24:19<1:06:13,  4.41s/it]
Lanes were detected
 29%|██▊       | 360/1261 [24:23<1:02:22,  4.15s/it]
Lanes were detected
 29%|██▊       | 361/1261 [24:26<1:00:25,  4.03s/it]
Lanes were detected
 29%|██▊       | 362/1261 [24:30<58:19,  3.89s/it]  
Lanes were detected
 29%|██▉       | 363/1261 [24:34<57:57,  3.87s/it]
Lanes were detected
 29%|██▉       | 364/1261 [24:37<56:19,  3.77s/it]
Lanes were detected
 29%|██▉       | 365/1261 [24:41<54:39,  3.66s/it]
Lanes were detected
 29%|██▉       | 366/1261 [24:44<54:24,  3.65s/it]
Lanes were detected
 29%|██▉       | 367/1261 [24:48<55:12,  3.71s/it]
Lanes were detected
 29%|██▉       | 368/1261 [24:52<54:16,  3.65s/it]
Lanes were detected
 29%|██▉       | 369/1261 [24:55<51:58,  3.50s/it]
Lanes were detected
 29%|██▉       | 370/1261 [24:59<52:44,  3.55s/it]
Lanes were detected
 29%|██▉       | 371/1261 [25:02<53:00,  3.57s/it]
Lanes were detected
 30%|██▉       | 372/1261 [25:06<53:12,  3.59s/it]
Lanes were detected
 30%|██▉       | 373/1261 [25:09<52:25,  3.54s/it]
Lanes were detected
 30%|██▉       | 374/1261 [25:13<52:17,  3.54s/it]
Lanes were detected
 30%|██▉       | 375/1261 [25:16<52:19,  3.54s/it]
Lanes were detected
 30%|██▉       | 376/1261 [25:20<51:12,  3.47s/it]
Lanes were detected
 30%|██▉       | 377/1261 [25:23<52:17,  3.55s/it]
Lanes were detected
 30%|██▉       | 378/1261 [25:27<53:00,  3.60s/it]
Lanes were detected
 30%|███       | 379/1261 [25:31<56:13,  3.83s/it]
Lanes were detected
 30%|███       | 380/1261 [25:35<55:57,  3.81s/it]
Lanes were detected
 30%|███       | 381/1261 [25:39<55:20,  3.77s/it]
Lanes were detected
 30%|███       | 382/1261 [25:43<54:16,  3.70s/it]
Lanes were detected
 30%|███       | 383/1261 [25:46<52:32,  3.59s/it]
Lanes were detected
 30%|███       | 384/1261 [25:49<51:13,  3.50s/it]
Lanes were detected
 31%|███       | 385/1261 [25:53<52:55,  3.62s/it]
Lanes were detected
 31%|███       | 386/1261 [25:57<53:23,  3.66s/it]
Lanes were detected
 31%|███       | 387/1261 [26:01<56:39,  3.89s/it]
Lanes were detected
 31%|███       | 388/1261 [26:05<57:55,  3.98s/it]
Lanes were detected
 31%|███       | 389/1261 [26:09<56:29,  3.89s/it]
Lanes were detected
 31%|███       | 390/1261 [26:13<54:39,  3.77s/it]
Lanes were detected
 31%|███       | 391/1261 [26:16<52:15,  3.60s/it]
Lanes were detected
 31%|███       | 392/1261 [26:19<51:32,  3.56s/it]
Lanes were detected
 31%|███       | 393/1261 [26:23<52:57,  3.66s/it]
Lanes were detected
 31%|███       | 394/1261 [26:29<1:02:02,  4.29s/it]
Lanes were detected
 31%|███▏      | 395/1261 [26:35<1:10:57,  4.92s/it]
Lanes were detected
 31%|███▏      | 396/1261 [26:40<1:08:31,  4.75s/it]
Lanes were detected
 31%|███▏      | 397/1261 [26:43<1:03:02,  4.38s/it]
Lanes were detected
 32%|███▏      | 398/1261 [26:47<1:00:25,  4.20s/it]
Lanes were detected
 32%|███▏      | 399/1261 [26:50<57:27,  4.00s/it]  
Lanes were detected
 32%|███▏      | 400/1261 [26:54<56:13,  3.92s/it]
Lanes were detected
 32%|███▏      | 401/1261 [26:58<56:24,  3.94s/it]
Lanes were detected
 32%|███▏      | 402/1261 [27:02<55:17,  3.86s/it]
Lanes were detected
 32%|███▏      | 403/1261 [27:05<53:56,  3.77s/it]
Lanes were detected
 32%|███▏      | 404/1261 [27:09<52:58,  3.71s/it]
Lanes were detected
 32%|███▏      | 405/1261 [27:13<52:40,  3.69s/it]
Lanes were detected
 32%|███▏      | 406/1261 [27:16<51:19,  3.60s/it]
Lanes were detected
 32%|███▏      | 407/1261 [27:19<50:22,  3.54s/it]
Lanes were detected
 32%|███▏      | 408/1261 [27:23<49:42,  3.50s/it]
Lanes were detected
 32%|███▏      | 409/1261 [27:26<48:12,  3.40s/it]
Lanes were detected
 33%|███▎      | 410/1261 [27:29<48:01,  3.39s/it]
Lanes were detected
 33%|███▎      | 411/1261 [27:33<47:29,  3.35s/it]
Lanes were detected
 33%|███▎      | 412/1261 [27:36<46:30,  3.29s/it]
Lanes were detected
 33%|███▎      | 413/1261 [27:39<45:49,  3.24s/it]
Lanes were detected
 33%|███▎      | 414/1261 [27:42<45:13,  3.20s/it]
Lanes were detected
 33%|███▎      | 415/1261 [27:45<44:51,  3.18s/it]
Lanes were detected
 33%|███▎      | 416/1261 [27:48<44:31,  3.16s/it]
Lanes were detected
 33%|███▎      | 417/1261 [27:51<44:16,  3.15s/it]
Lanes were detected
 33%|███▎      | 418/1261 [27:54<44:05,  3.14s/it]
Lanes were detected
 33%|███▎      | 419/1261 [27:58<44:03,  3.14s/it]
Lanes were detected
 33%|███▎      | 420/1261 [28:01<44:01,  3.14s/it]
Lanes were detected
 33%|███▎      | 421/1261 [28:04<44:00,  3.14s/it]
Lanes were detected
 33%|███▎      | 422/1261 [28:07<43:54,  3.14s/it]
Lanes were detected
 34%|███▎      | 423/1261 [28:10<43:41,  3.13s/it]
Lanes were detected
 34%|███▎      | 424/1261 [28:13<43:34,  3.12s/it]
Lanes were detected
 34%|███▎      | 425/1261 [28:16<43:28,  3.12s/it]
Lanes were detected
 34%|███▍      | 426/1261 [28:20<43:32,  3.13s/it]
Lanes were detected
 34%|███▍      | 427/1261 [28:23<43:24,  3.12s/it]
Lanes were detected
 34%|███▍      | 428/1261 [28:26<43:14,  3.11s/it]
Lanes were detected
 34%|███▍      | 429/1261 [28:29<43:53,  3.17s/it]
Lanes were detected
 34%|███▍      | 430/1261 [28:32<43:45,  3.16s/it]
Lanes were detected
 34%|███▍      | 431/1261 [28:35<43:38,  3.15s/it]
Lanes were detected
 34%|███▍      | 432/1261 [28:38<43:31,  3.15s/it]
Lanes were detected
 34%|███▍      | 433/1261 [28:42<43:39,  3.16s/it]
Lanes were detected
 34%|███▍      | 434/1261 [28:45<43:36,  3.16s/it]
Lanes were detected
 34%|███▍      | 435/1261 [28:48<44:50,  3.26s/it]
Lanes were detected
 35%|███▍      | 436/1261 [28:52<45:45,  3.33s/it]
Lanes were detected
 35%|███▍      | 437/1261 [28:55<46:14,  3.37s/it]
Lanes were detected
 35%|███▍      | 438/1261 [28:59<46:06,  3.36s/it]
Lanes were detected
 35%|███▍      | 439/1261 [29:02<46:04,  3.36s/it]
Lanes were detected
 35%|███▍      | 440/1261 [29:05<45:36,  3.33s/it]
Lanes were detected
 35%|███▍      | 441/1261 [29:08<44:51,  3.28s/it]
Lanes were detected
 35%|███▌      | 442/1261 [29:12<44:29,  3.26s/it]
Lanes were detected
 35%|███▌      | 443/1261 [29:15<44:32,  3.27s/it]
Lanes were detected
 35%|███▌      | 444/1261 [29:18<44:38,  3.28s/it]
Lanes were detected
 35%|███▌      | 445/1261 [29:21<44:07,  3.24s/it]
Lanes were detected
 35%|███▌      | 446/1261 [29:24<43:48,  3.22s/it]
Lanes were detected
 35%|███▌      | 447/1261 [29:28<44:55,  3.31s/it]
Lanes were detected
 36%|███▌      | 448/1261 [29:32<46:51,  3.46s/it]
Lanes were detected
 36%|███▌      | 449/1261 [29:35<46:24,  3.43s/it]
Lanes were detected
 36%|███▌      | 450/1261 [29:39<46:15,  3.42s/it]
Lanes were detected
 36%|███▌      | 451/1261 [29:42<45:17,  3.35s/it]
Lanes were detected
 36%|███▌      | 452/1261 [29:45<44:08,  3.27s/it]
Lanes were detected
 36%|███▌      | 453/1261 [29:48<43:31,  3.23s/it]
Lanes were detected
 36%|███▌      | 454/1261 [29:51<42:50,  3.18s/it]
Lanes were detected
 36%|███▌      | 455/1261 [29:54<42:20,  3.15s/it]
Lanes were detected
 36%|███▌      | 456/1261 [29:57<42:03,  3.14s/it]
Lanes were detected
 36%|███▌      | 457/1261 [30:00<41:49,  3.12s/it]
Lanes were detected
 36%|███▋      | 458/1261 [30:03<41:39,  3.11s/it]
Lanes were detected
 36%|███▋      | 459/1261 [30:07<41:36,  3.11s/it]
Lanes were detected
 36%|███▋      | 460/1261 [30:10<41:31,  3.11s/it]
Lanes were detected
 37%|███▋      | 461/1261 [30:13<41:26,  3.11s/it]
Lanes were detected
 37%|███▋      | 462/1261 [30:16<41:24,  3.11s/it]
Lanes were detected
 37%|███▋      | 463/1261 [30:19<41:23,  3.11s/it]
Lanes were detected
 37%|███▋      | 464/1261 [30:22<41:05,  3.09s/it]
Lanes were detected
 37%|███▋      | 465/1261 [30:25<40:59,  3.09s/it]
Lanes were detected
 37%|███▋      | 466/1261 [30:28<41:09,  3.11s/it]
Lanes were detected
 37%|███▋      | 467/1261 [30:31<41:00,  3.10s/it]
Lanes were detected
 37%|███▋      | 468/1261 [30:34<40:47,  3.09s/it]
Lanes were detected
 37%|███▋      | 469/1261 [30:37<40:45,  3.09s/it]
Lanes were detected
 37%|███▋      | 470/1261 [30:41<40:39,  3.08s/it]
Lanes were detected
 37%|███▋      | 471/1261 [30:44<40:34,  3.08s/it]
Lanes were detected
 37%|███▋      | 472/1261 [30:47<40:34,  3.09s/it]
Lanes were detected
 38%|███▊      | 473/1261 [30:50<40:27,  3.08s/it]
Lanes were detected
 38%|███▊      | 474/1261 [30:53<40:22,  3.08s/it]
Lanes were detected
 38%|███▊      | 475/1261 [30:56<40:19,  3.08s/it]
Lanes were detected
 38%|███▊      | 476/1261 [30:59<40:16,  3.08s/it]
Lanes were detected
 38%|███▊      | 477/1261 [31:02<40:16,  3.08s/it]
Lanes were detected
 38%|███▊      | 478/1261 [31:05<40:12,  3.08s/it]
Lanes were detected
 38%|███▊      | 479/1261 [31:08<40:09,  3.08s/it]
Lanes were detected
 38%|███▊      | 480/1261 [31:11<40:08,  3.08s/it]
Lanes were detected
 38%|███▊      | 481/1261 [31:14<40:12,  3.09s/it]
Lanes were detected
 38%|███▊      | 482/1261 [31:18<40:12,  3.10s/it]
Lanes were detected
 38%|███▊      | 483/1261 [31:21<40:07,  3.10s/it]
Lanes were detected
 38%|███▊      | 484/1261 [31:24<39:56,  3.08s/it]
Lanes were detected
 38%|███▊      | 485/1261 [31:27<39:53,  3.08s/it]
Lanes were detected
 39%|███▊      | 486/1261 [31:30<41:38,  3.22s/it]
Lanes were detected
 39%|███▊      | 487/1261 [31:34<41:25,  3.21s/it]
Lanes were detected
 39%|███▊      | 488/1261 [31:37<41:04,  3.19s/it]
Lanes were detected
 39%|███▉      | 489/1261 [31:40<40:37,  3.16s/it]
Lanes were detected
 39%|███▉      | 490/1261 [31:43<40:18,  3.14s/it]
Lanes were detected
 39%|███▉      | 491/1261 [31:46<40:04,  3.12s/it]
Lanes were detected
 39%|███▉      | 492/1261 [31:49<39:52,  3.11s/it]
Lanes were detected
 39%|███▉      | 493/1261 [31:52<39:41,  3.10s/it]
Lanes were detected
 39%|███▉      | 494/1261 [31:55<39:29,  3.09s/it]
Lanes were detected
 39%|███▉      | 495/1261 [31:58<39:22,  3.08s/it]
Lanes were detected
 39%|███▉      | 496/1261 [32:01<39:15,  3.08s/it]
Lanes were detected
 39%|███▉      | 497/1261 [32:04<39:11,  3.08s/it]
Lanes were detected
 39%|███▉      | 498/1261 [32:07<39:05,  3.07s/it]
Lanes were detected
 40%|███▉      | 499/1261 [32:11<39:03,  3.08s/it]
Lanes were detected
 40%|███▉      | 500/1261 [32:14<38:59,  3.07s/it]
Lanes were detected
 40%|███▉      | 501/1261 [32:17<38:57,  3.08s/it]
Lanes were detected
 40%|███▉      | 502/1261 [32:20<38:52,  3.07s/it]
Lanes were detected
 40%|███▉      | 503/1261 [32:23<38:48,  3.07s/it]
Lanes were detected
 40%|███▉      | 504/1261 [32:26<38:47,  3.07s/it]
Lanes were detected
 40%|████      | 505/1261 [32:29<38:53,  3.09s/it]
Lanes were detected
 40%|████      | 506/1261 [32:32<38:55,  3.09s/it]
Lanes were detected
 40%|████      | 507/1261 [32:35<38:50,  3.09s/it]
Lanes were detected
 40%|████      | 508/1261 [32:38<38:45,  3.09s/it]
Lanes were detected
 40%|████      | 509/1261 [32:41<38:48,  3.10s/it]
Lanes were detected
 40%|████      | 510/1261 [32:44<38:40,  3.09s/it]
Lanes were detected
 41%|████      | 511/1261 [32:48<39:00,  3.12s/it]
Lanes were detected
 41%|████      | 512/1261 [32:51<38:55,  3.12s/it]
Lanes were detected
 41%|████      | 513/1261 [32:54<38:47,  3.11s/it]
Lanes were detected
 41%|████      | 514/1261 [32:57<38:45,  3.11s/it]
Lanes were detected
 41%|████      | 515/1261 [33:00<38:50,  3.12s/it]
Lanes were detected
 41%|████      | 516/1261 [33:03<38:36,  3.11s/it]
Lanes were detected
 41%|████      | 517/1261 [33:06<38:30,  3.11s/it]
Lanes were detected
 41%|████      | 518/1261 [33:09<38:17,  3.09s/it]
Lanes were detected
 41%|████      | 519/1261 [33:12<38:14,  3.09s/it]
Lanes were detected
 41%|████      | 520/1261 [33:16<38:12,  3.09s/it]
Lanes were detected
 41%|████▏     | 521/1261 [33:19<38:12,  3.10s/it]
Lanes were detected
 41%|████▏     | 522/1261 [33:22<38:08,  3.10s/it]
Lanes were detected
 41%|████▏     | 523/1261 [33:25<38:03,  3.09s/it]
Lanes were detected
 42%|████▏     | 524/1261 [33:28<38:42,  3.15s/it]
Lanes were detected
 42%|████▏     | 525/1261 [33:32<39:28,  3.22s/it]
Lanes were detected
 42%|████▏     | 526/1261 [33:35<38:52,  3.17s/it]
Lanes were detected
 42%|████▏     | 527/1261 [33:38<38:51,  3.18s/it]
Lanes were detected
 42%|████▏     | 528/1261 [33:41<38:25,  3.15s/it]
Lanes were detected
 42%|████▏     | 529/1261 [33:44<38:06,  3.12s/it]
Lanes were detected
 42%|████▏     | 530/1261 [33:47<37:57,  3.11s/it]
Lanes were detected
 42%|████▏     | 531/1261 [33:50<37:45,  3.10s/it]
Lanes were detected
 42%|████▏     | 532/1261 [33:53<37:38,  3.10s/it]
Lanes were detected
 42%|████▏     | 533/1261 [33:57<38:39,  3.19s/it]
Lanes were detected
 42%|████▏     | 534/1261 [34:00<38:20,  3.16s/it]
Lanes were detected
 42%|████▏     | 535/1261 [34:03<38:02,  3.14s/it]
Lanes were detected
 43%|████▎     | 536/1261 [34:06<37:49,  3.13s/it]
Lanes were detected
 43%|████▎     | 537/1261 [34:09<37:33,  3.11s/it]
Lanes were detected
 43%|████▎     | 538/1261 [34:12<37:51,  3.14s/it]
Lanes were detected
 43%|████▎     | 539/1261 [34:15<37:34,  3.12s/it]
Lanes were detected
 43%|████▎     | 540/1261 [34:18<37:22,  3.11s/it]
Lanes were detected
 43%|████▎     | 541/1261 [34:21<37:14,  3.10s/it]
Lanes were detected
 43%|████▎     | 542/1261 [34:24<37:06,  3.10s/it]
Lanes were detected
 43%|████▎     | 543/1261 [34:28<37:02,  3.10s/it]
Lanes were detected
 43%|████▎     | 544/1261 [34:31<36:54,  3.09s/it]
Lanes were detected
 43%|████▎     | 545/1261 [34:34<36:46,  3.08s/it]
Lanes weren't detected
 43%|████▎     | 546/1261 [34:37<36:45,  3.09s/it]
Lanes weren't detected
 43%|████▎     | 547/1261 [34:40<36:40,  3.08s/it]
Lanes weren't detected
 43%|████▎     | 548/1261 [34:43<36:41,  3.09s/it]
Lanes weren't detected
 44%|████▎     | 549/1261 [34:46<37:49,  3.19s/it]
Lanes weren't detected
 44%|████▎     | 550/1261 [34:49<37:17,  3.15s/it]
Lanes were detected
 44%|████▎     | 551/1261 [34:53<37:05,  3.13s/it]
Lanes were detected
 44%|████▍     | 552/1261 [34:56<36:47,  3.11s/it]
Lanes were detected
 44%|████▍     | 553/1261 [34:59<36:42,  3.11s/it]
Lanes were detected
 44%|████▍     | 554/1261 [35:02<36:31,  3.10s/it]
Lanes weren't detected
 44%|████▍     | 555/1261 [35:05<36:21,  3.09s/it]
Lanes weren't detected
 44%|████▍     | 556/1261 [35:08<36:12,  3.08s/it]
Lanes were detected
 44%|████▍     | 557/1261 [35:11<36:04,  3.07s/it]
Lanes were detected
 44%|████▍     | 558/1261 [35:14<35:58,  3.07s/it]
Lanes weren't detected
 44%|████▍     | 559/1261 [35:17<36:01,  3.08s/it]
Lanes weren't detected
 44%|████▍     | 560/1261 [35:20<35:58,  3.08s/it]
Lanes weren't detected
 44%|████▍     | 561/1261 [35:23<35:55,  3.08s/it]
Lanes weren't detected
 45%|████▍     | 562/1261 [35:26<35:52,  3.08s/it]
Lanes weren't detected
 45%|████▍     | 563/1261 [35:30<37:01,  3.18s/it]
Lanes were detected
 45%|████▍     | 564/1261 [35:33<37:01,  3.19s/it]
Lanes were detected
 45%|████▍     | 565/1261 [35:36<36:43,  3.17s/it]
Lanes were detected
 45%|████▍     | 566/1261 [35:39<36:24,  3.14s/it]
Lanes were detected
 45%|████▍     | 567/1261 [35:42<36:05,  3.12s/it]
Lanes were detected
 45%|████▌     | 568/1261 [35:45<35:51,  3.10s/it]
Lanes were detected
 45%|████▌     | 569/1261 [35:48<35:42,  3.10s/it]
Lanes weren't detected
 45%|████▌     | 570/1261 [35:51<35:28,  3.08s/it]
Lanes were detected
 45%|████▌     | 571/1261 [35:55<35:22,  3.08s/it]
Lanes were detected
 45%|████▌     | 572/1261 [35:58<35:17,  3.07s/it]
Lanes were detected
 45%|████▌     | 573/1261 [36:01<35:15,  3.07s/it]
Lanes were detected
 46%|████▌     | 574/1261 [36:04<35:08,  3.07s/it]
Lanes were detected
 46%|████▌     | 575/1261 [36:07<35:06,  3.07s/it]
Lanes were detected
 46%|████▌     | 576/1261 [36:10<34:59,  3.06s/it]
Lanes were detected
 46%|████▌     | 577/1261 [36:13<35:03,  3.08s/it]
Lanes were detected
 46%|████▌     | 578/1261 [36:16<34:58,  3.07s/it]
Lanes were detected
 46%|████▌     | 579/1261 [36:19<34:57,  3.08s/it]
Lanes were detected
 46%|████▌     | 580/1261 [36:22<34:50,  3.07s/it]
Lanes were detected
 46%|████▌     | 581/1261 [36:25<34:49,  3.07s/it]
Lanes were detected
 46%|████▌     | 582/1261 [36:28<34:47,  3.07s/it]
Lanes were detected
 46%|████▌     | 583/1261 [36:31<34:41,  3.07s/it]
Lanes were detected
 46%|████▋     | 584/1261 [36:34<34:39,  3.07s/it]
Lanes were detected
 46%|████▋     | 585/1261 [36:38<34:40,  3.08s/it]
Lanes were detected
 46%|████▋     | 586/1261 [36:41<34:36,  3.08s/it]
Lanes were detected
 47%|████▋     | 587/1261 [36:44<34:36,  3.08s/it]
Lanes were detected
 47%|████▋     | 588/1261 [36:47<34:33,  3.08s/it]
Lanes were detected
 47%|████▋     | 589/1261 [36:50<34:26,  3.08s/it]
Lanes were detected
 47%|████▋     | 590/1261 [36:53<34:18,  3.07s/it]
Lanes were detected
 47%|████▋     | 591/1261 [36:56<34:13,  3.06s/it]
Lanes were detected
 47%|████▋     | 592/1261 [36:59<34:12,  3.07s/it]
Lanes were detected
 47%|████▋     | 593/1261 [37:02<34:12,  3.07s/it]
Lanes were detected
 47%|████▋     | 594/1261 [37:05<34:17,  3.08s/it]
Lanes were detected
 47%|████▋     | 595/1261 [37:08<34:18,  3.09s/it]
Lanes were detected
 47%|████▋     | 596/1261 [37:11<34:24,  3.10s/it]
Lanes were detected
 47%|████▋     | 597/1261 [37:15<34:29,  3.12s/it]
Lanes were detected
 47%|████▋     | 598/1261 [37:18<34:30,  3.12s/it]
Lanes were detected
 48%|████▊     | 599/1261 [37:21<34:19,  3.11s/it]
Lanes were detected
 48%|████▊     | 600/1261 [37:24<34:13,  3.11s/it]
Lanes were detected
 48%|████▊     | 601/1261 [37:27<34:11,  3.11s/it]
Lanes were detected
 48%|████▊     | 602/1261 [37:30<35:03,  3.19s/it]
Lanes were detected
 48%|████▊     | 603/1261 [37:34<34:55,  3.18s/it]
Lanes were detected
 48%|████▊     | 604/1261 [37:37<34:42,  3.17s/it]
Lanes were detected
 48%|████▊     | 605/1261 [37:40<34:24,  3.15s/it]
Lanes were detected
 48%|████▊     | 606/1261 [37:43<34:07,  3.13s/it]
Lanes were detected
 48%|████▊     | 607/1261 [37:46<33:54,  3.11s/it]
Lanes were detected
 48%|████▊     | 608/1261 [37:49<33:44,  3.10s/it]
Lanes were detected
 48%|████▊     | 609/1261 [37:52<33:38,  3.10s/it]
Lanes were detected
 48%|████▊     | 610/1261 [37:55<33:34,  3.09s/it]
Lanes were detected
 48%|████▊     | 611/1261 [37:58<34:02,  3.14s/it]
Lanes were detected
 49%|████▊     | 612/1261 [38:02<34:21,  3.18s/it]
Lanes were detected
 49%|████▊     | 613/1261 [38:05<34:33,  3.20s/it]
Lanes were detected
 49%|████▊     | 614/1261 [38:08<34:32,  3.20s/it]
Lanes were detected
 49%|████▉     | 615/1261 [38:11<34:16,  3.18s/it]
Lanes were detected
 49%|████▉     | 616/1261 [38:14<33:58,  3.16s/it]
Lanes were detected
 49%|████▉     | 617/1261 [38:18<33:47,  3.15s/it]
Lanes were detected
 49%|████▉     | 618/1261 [38:21<33:34,  3.13s/it]
Lanes were detected
 49%|████▉     | 619/1261 [38:24<33:19,  3.12s/it]
Lanes were detected
 49%|████▉     | 620/1261 [38:27<33:14,  3.11s/it]
Lanes were detected
 49%|████▉     | 621/1261 [38:30<33:05,  3.10s/it]
Lanes were detected
 49%|████▉     | 622/1261 [38:33<33:01,  3.10s/it]
Lanes were detected
 49%|████▉     | 623/1261 [38:36<32:55,  3.10s/it]
Lanes were detected
 49%|████▉     | 624/1261 [38:39<32:56,  3.10s/it]
Lanes were detected
 50%|████▉     | 625/1261 [38:42<32:59,  3.11s/it]
Lanes were detected
 50%|████▉     | 626/1261 [38:45<32:57,  3.11s/it]
Lanes were detected
 50%|████▉     | 627/1261 [38:49<32:50,  3.11s/it]
Lanes were detected
 50%|████▉     | 628/1261 [38:52<32:44,  3.10s/it]
Lanes were detected
 50%|████▉     | 629/1261 [38:55<32:47,  3.11s/it]
Lanes were detected
 50%|████▉     | 630/1261 [38:58<33:35,  3.19s/it]
Lanes were detected
 50%|█████     | 631/1261 [39:01<33:17,  3.17s/it]
Lanes were detected
 50%|█████     | 632/1261 [39:04<32:59,  3.15s/it]
Lanes were detected
 50%|█████     | 633/1261 [39:08<32:53,  3.14s/it]
Lanes were detected
 50%|█████     | 634/1261 [39:11<32:45,  3.13s/it]
Lanes were detected
 50%|█████     | 635/1261 [39:14<32:47,  3.14s/it]
Lanes were detected
 50%|█████     | 636/1261 [39:17<32:34,  3.13s/it]
Lanes were detected
 51%|█████     | 637/1261 [39:20<32:24,  3.12s/it]
Lanes were detected
 51%|█████     | 638/1261 [39:23<32:16,  3.11s/it]
Lanes were detected
 51%|█████     | 639/1261 [39:26<32:10,  3.10s/it]
Lanes were detected
 51%|█████     | 640/1261 [39:30<32:51,  3.18s/it]
Lanes were detected
 51%|█████     | 641/1261 [39:33<33:07,  3.21s/it]
Lanes were detected
 51%|█████     | 642/1261 [39:36<32:48,  3.18s/it]
Lanes were detected
 51%|█████     | 643/1261 [39:39<32:35,  3.16s/it]
Lanes were detected
 51%|█████     | 644/1261 [39:42<32:17,  3.14s/it]
Lanes were detected
 51%|█████     | 645/1261 [39:45<32:01,  3.12s/it]
Lanes were detected
 51%|█████     | 646/1261 [39:48<31:49,  3.11s/it]
Lanes were detected
 51%|█████▏    | 647/1261 [39:51<31:45,  3.10s/it]
Lanes were detected
 51%|█████▏    | 648/1261 [39:54<31:37,  3.09s/it]
Lanes were detected
 51%|█████▏    | 649/1261 [39:58<31:35,  3.10s/it]
Lanes were detected
 52%|█████▏    | 650/1261 [40:01<31:25,  3.09s/it]
Lanes were detected
 52%|█████▏    | 651/1261 [40:04<31:17,  3.08s/it]
Lanes were detected
 52%|█████▏    | 652/1261 [40:07<31:14,  3.08s/it]
Lanes were detected
 52%|█████▏    | 653/1261 [40:10<31:08,  3.07s/it]
Lanes were detected
 52%|█████▏    | 654/1261 [40:13<31:01,  3.07s/it]
Lanes were detected
 52%|█████▏    | 655/1261 [40:16<30:55,  3.06s/it]
Lanes were detected
 52%|█████▏    | 656/1261 [40:19<30:50,  3.06s/it]
Lanes were detected
 52%|█████▏    | 657/1261 [40:22<30:46,  3.06s/it]
Lanes were detected
 52%|█████▏    | 658/1261 [40:25<30:42,  3.06s/it]
Lanes were detected
 52%|█████▏    | 659/1261 [40:28<30:39,  3.06s/it]
Lanes were detected
 52%|█████▏    | 660/1261 [40:31<30:36,  3.06s/it]
Lanes were detected
 52%|█████▏    | 661/1261 [40:34<30:35,  3.06s/it]
Lanes were detected
 52%|█████▏    | 662/1261 [40:37<30:37,  3.07s/it]
Lanes were detected
 53%|█████▎    | 663/1261 [40:40<30:30,  3.06s/it]
Lanes were detected
 53%|█████▎    | 664/1261 [40:43<30:26,  3.06s/it]
Lanes were detected
 53%|█████▎    | 665/1261 [40:46<30:20,  3.06s/it]
Lanes were detected
 53%|█████▎    | 666/1261 [40:50<30:17,  3.05s/it]
Lanes were detected
 53%|█████▎    | 667/1261 [40:53<30:13,  3.05s/it]
Lanes were detected
 53%|█████▎    | 668/1261 [40:56<30:11,  3.05s/it]
Lanes were detected
 53%|█████▎    | 669/1261 [40:59<30:08,  3.05s/it]
Lanes were detected
 53%|█████▎    | 670/1261 [41:02<30:06,  3.06s/it]
Lanes were detected
 53%|█████▎    | 671/1261 [41:05<30:04,  3.06s/it]
Lanes were detected
 53%|█████▎    | 672/1261 [41:08<30:02,  3.06s/it]
Lanes were detected
 53%|█████▎    | 673/1261 [41:11<29:59,  3.06s/it]
Lanes were detected
 53%|█████▎    | 674/1261 [41:14<29:58,  3.06s/it]
Lanes were detected
 54%|█████▎    | 675/1261 [41:17<29:52,  3.06s/it]
Lanes were detected
 54%|█████▎    | 676/1261 [41:20<29:54,  3.07s/it]
Lanes were detected
 54%|█████▎    | 677/1261 [41:23<29:51,  3.07s/it]
Lanes were detected
 54%|█████▍    | 678/1261 [41:26<29:47,  3.07s/it]
Lanes were detected
 54%|█████▍    | 679/1261 [41:30<30:29,  3.14s/it]
Lanes were detected
 54%|█████▍    | 680/1261 [41:33<30:45,  3.18s/it]
Lanes were detected
 54%|█████▍    | 681/1261 [41:36<30:26,  3.15s/it]
Lanes were detected
 54%|█████▍    | 682/1261 [41:39<30:17,  3.14s/it]
Lanes were detected
 54%|█████▍    | 683/1261 [41:42<30:03,  3.12s/it]
Lanes were detected
 54%|█████▍    | 684/1261 [41:45<29:49,  3.10s/it]
Lanes were detected
 54%|█████▍    | 685/1261 [41:48<29:37,  3.09s/it]
Lanes were detected
 54%|█████▍    | 686/1261 [41:51<29:30,  3.08s/it]
Lanes were detected
 54%|█████▍    | 687/1261 [41:54<29:21,  3.07s/it]
Lanes were detected
 55%|█████▍    | 688/1261 [41:57<29:21,  3.07s/it]
Lanes were detected
 55%|█████▍    | 689/1261 [42:01<29:15,  3.07s/it]
Lanes were detected
 55%|█████▍    | 690/1261 [42:04<29:16,  3.08s/it]
Lanes were detected
 55%|█████▍    | 691/1261 [42:07<29:14,  3.08s/it]
Lanes were detected
 55%|█████▍    | 692/1261 [42:10<29:15,  3.08s/it]
Lanes were detected
 55%|█████▍    | 693/1261 [42:13<29:12,  3.09s/it]
Lanes were detected
 55%|█████▌    | 694/1261 [42:16<29:06,  3.08s/it]
Lanes were detected
 55%|█████▌    | 695/1261 [42:19<29:00,  3.07s/it]
Lanes were detected
 55%|█████▌    | 696/1261 [42:22<28:56,  3.07s/it]
Lanes were detected
 55%|█████▌    | 697/1261 [42:25<28:51,  3.07s/it]
Lanes were detected
 55%|█████▌    | 698/1261 [42:28<28:49,  3.07s/it]
Lanes were detected
 55%|█████▌    | 699/1261 [42:31<28:46,  3.07s/it]
Lanes were detected
 56%|█████▌    | 700/1261 [42:34<28:44,  3.07s/it]
Lanes were detected
 56%|█████▌    | 701/1261 [42:37<28:40,  3.07s/it]
Lanes were detected
 56%|█████▌    | 702/1261 [42:41<28:41,  3.08s/it]
Lanes were detected
 56%|█████▌    | 703/1261 [42:44<28:38,  3.08s/it]
Lanes were detected
 56%|█████▌    | 704/1261 [42:47<28:38,  3.09s/it]
Lanes were detected
 56%|█████▌    | 705/1261 [42:50<28:38,  3.09s/it]
Lanes were detected
 56%|█████▌    | 706/1261 [42:53<28:36,  3.09s/it]
Lanes were detected
 56%|█████▌    | 707/1261 [42:56<28:34,  3.10s/it]
Lanes were detected
 56%|█████▌    | 708/1261 [42:59<28:37,  3.11s/it]
Lanes were detected
 56%|█████▌    | 709/1261 [43:02<28:33,  3.10s/it]
Lanes were detected
 56%|█████▋    | 710/1261 [43:05<28:31,  3.11s/it]
Lanes were detected
 56%|█████▋    | 711/1261 [43:08<28:26,  3.10s/it]
Lanes were detected
 56%|█████▋    | 712/1261 [43:12<28:21,  3.10s/it]
Lanes were detected
 57%|█████▋    | 713/1261 [43:15<28:12,  3.09s/it]
Lanes were detected
 57%|█████▋    | 714/1261 [43:18<28:10,  3.09s/it]
Lanes were detected
 57%|█████▋    | 715/1261 [43:21<28:04,  3.08s/it]
Lanes were detected
 57%|█████▋    | 716/1261 [43:24<27:56,  3.08s/it]
Lanes were detected
 57%|█████▋    | 717/1261 [43:27<27:56,  3.08s/it]
Lanes were detected
 57%|█████▋    | 718/1261 [43:30<28:48,  3.18s/it]
Lanes were detected
 57%|█████▋    | 719/1261 [43:33<28:43,  3.18s/it]
Lanes were detected
 57%|█████▋    | 720/1261 [43:37<28:29,  3.16s/it]
Lanes were detected
 57%|█████▋    | 721/1261 [43:40<28:27,  3.16s/it]
Lanes were detected
 57%|█████▋    | 722/1261 [43:43<28:10,  3.14s/it]
Lanes were detected
 57%|█████▋    | 723/1261 [43:46<28:00,  3.12s/it]
Lanes were detected
 57%|█████▋    | 724/1261 [43:49<27:50,  3.11s/it]
Lanes were detected
 57%|█████▋    | 725/1261 [43:52<27:40,  3.10s/it]
Lanes were detected
 58%|█████▊    | 726/1261 [43:55<28:02,  3.14s/it]
Lanes were detected
 58%|█████▊    | 727/1261 [43:58<27:53,  3.13s/it]
Lanes were detected
 58%|█████▊    | 728/1261 [44:02<27:44,  3.12s/it]
Lanes were detected
 58%|█████▊    | 729/1261 [44:05<27:36,  3.11s/it]
Lanes were detected
 58%|█████▊    | 730/1261 [44:08<27:31,  3.11s/it]
Lanes were detected
 58%|█████▊    | 731/1261 [44:11<27:34,  3.12s/it]
Lanes were detected
 58%|█████▊    | 732/1261 [44:14<27:29,  3.12s/it]
Lanes were detected
 58%|█████▊    | 733/1261 [44:17<27:32,  3.13s/it]
Lanes were detected
 58%|█████▊    | 734/1261 [44:20<27:25,  3.12s/it]
Lanes were detected
 58%|█████▊    | 735/1261 [44:23<27:19,  3.12s/it]
Lanes were detected
 58%|█████▊    | 736/1261 [44:26<27:13,  3.11s/it]
Lanes were detected
 58%|█████▊    | 737/1261 [44:30<27:11,  3.11s/it]
Lanes were detected
 59%|█████▊    | 738/1261 [44:33<27:10,  3.12s/it]
Lanes were detected
 59%|█████▊    | 739/1261 [44:36<27:07,  3.12s/it]
Lanes were detected
 59%|█████▊    | 740/1261 [44:39<27:08,  3.13s/it]
Lanes were detected
 59%|█████▉    | 741/1261 [44:42<27:07,  3.13s/it]
Lanes were detected
 59%|█████▉    | 742/1261 [44:45<27:19,  3.16s/it]
Lanes were detected
 59%|█████▉    | 743/1261 [44:48<27:14,  3.16s/it]
Lanes were detected
 59%|█████▉    | 744/1261 [44:52<27:01,  3.14s/it]
Lanes were detected
 59%|█████▉    | 745/1261 [44:55<26:54,  3.13s/it]
Lanes were detected
 59%|█████▉    | 746/1261 [44:58<27:10,  3.17s/it]
Lanes were detected
 59%|█████▉    | 747/1261 [45:01<27:07,  3.17s/it]
Lanes were detected
 59%|█████▉    | 748/1261 [45:04<26:55,  3.15s/it]
Lanes were detected
 59%|█████▉    | 749/1261 [45:07<26:44,  3.13s/it]
Lanes were detected
 59%|█████▉    | 750/1261 [45:11<26:51,  3.15s/it]
Lanes were detected
 60%|█████▉    | 751/1261 [45:14<26:47,  3.15s/it]
Lanes were detected
 60%|█████▉    | 752/1261 [45:17<26:35,  3.13s/it]
Lanes were detected
 60%|█████▉    | 753/1261 [45:20<26:26,  3.12s/it]
Lanes were detected
 60%|█████▉    | 754/1261 [45:23<26:16,  3.11s/it]
Lanes were detected
 60%|█████▉    | 755/1261 [45:26<26:09,  3.10s/it]
Lanes were detected
 60%|█████▉    | 756/1261 [45:29<26:50,  3.19s/it]
Lanes were detected
 60%|██████    | 757/1261 [45:33<26:55,  3.21s/it]
Lanes were detected
 60%|██████    | 758/1261 [45:36<26:40,  3.18s/it]
Lanes were detected
 60%|██████    | 759/1261 [45:39<26:28,  3.16s/it]
Lanes were detected
 60%|██████    | 760/1261 [45:42<26:14,  3.14s/it]
Lanes were detected
 60%|██████    | 761/1261 [45:45<25:57,  3.11s/it]
Lanes were detected
 60%|██████    | 762/1261 [45:48<25:49,  3.11s/it]
Lanes were detected
 61%|██████    | 763/1261 [45:51<25:46,  3.10s/it]
Lanes were detected
 61%|██████    | 764/1261 [45:54<25:39,  3.10s/it]
Lanes were detected
 61%|██████    | 765/1261 [45:57<25:39,  3.10s/it]
Lanes were detected
 61%|██████    | 766/1261 [46:01<25:32,  3.10s/it]
Lanes were detected
 61%|██████    | 767/1261 [46:04<25:27,  3.09s/it]
Lanes were detected
 61%|██████    | 768/1261 [46:07<25:21,  3.09s/it]
Lanes were detected
 61%|██████    | 769/1261 [46:10<25:16,  3.08s/it]
Lanes were detected
 61%|██████    | 770/1261 [46:13<25:12,  3.08s/it]
Lanes were detected
 61%|██████    | 771/1261 [46:16<25:08,  3.08s/it]
Lanes were detected
 61%|██████    | 772/1261 [46:19<25:07,  3.08s/it]
Lanes were detected
 61%|██████▏   | 773/1261 [46:22<25:10,  3.10s/it]
Lanes were detected
 61%|██████▏   | 774/1261 [46:25<25:05,  3.09s/it]
Lanes were detected
 61%|██████▏   | 775/1261 [46:28<24:58,  3.08s/it]
Lanes were detected
 62%|██████▏   | 776/1261 [46:31<24:55,  3.08s/it]
Lanes were detected
 62%|██████▏   | 777/1261 [46:34<24:52,  3.08s/it]
Lanes were detected
 62%|██████▏   | 778/1261 [46:38<24:48,  3.08s/it]
Lanes were detected
 62%|██████▏   | 779/1261 [46:41<24:45,  3.08s/it]
Lanes were detected
 62%|██████▏   | 780/1261 [46:44<24:42,  3.08s/it]
Lanes were detected
 62%|██████▏   | 781/1261 [46:47<24:45,  3.09s/it]
Lanes were detected
 62%|██████▏   | 782/1261 [46:50<24:42,  3.09s/it]
Lanes were detected
 62%|██████▏   | 783/1261 [46:53<24:38,  3.09s/it]
Lanes were detected
 62%|██████▏   | 784/1261 [46:56<24:47,  3.12s/it]
Lanes were detected
 62%|██████▏   | 785/1261 [46:59<25:14,  3.18s/it]
Lanes were detected
 62%|██████▏   | 786/1261 [47:04<28:45,  3.63s/it]
Lanes were detected
 62%|██████▏   | 787/1261 [47:09<30:51,  3.91s/it]
Lanes were detected
 62%|██████▏   | 788/1261 [47:12<29:11,  3.70s/it]
Lanes were detected
 63%|██████▎   | 789/1261 [47:15<28:33,  3.63s/it]
Lanes were detected
 63%|██████▎   | 790/1261 [47:19<27:34,  3.51s/it]
Lanes were detected
 63%|██████▎   | 791/1261 [47:22<27:00,  3.45s/it]
Lanes were detected
 63%|██████▎   | 792/1261 [47:25<26:32,  3.40s/it]
Lanes were detected
 63%|██████▎   | 793/1261 [47:29<27:48,  3.57s/it]
Lanes were detected
 63%|██████▎   | 794/1261 [47:33<27:26,  3.53s/it]
Lanes were detected
 63%|██████▎   | 795/1261 [47:36<26:42,  3.44s/it]
Lanes were detected
 63%|██████▎   | 796/1261 [47:39<26:12,  3.38s/it]
Lanes were detected
 63%|██████▎   | 797/1261 [47:42<26:02,  3.37s/it]
Lanes were detected
 63%|██████▎   | 798/1261 [47:46<25:33,  3.31s/it]
Lanes were detected
 63%|██████▎   | 799/1261 [47:49<25:09,  3.27s/it]
Lanes were detected
 63%|██████▎   | 800/1261 [47:52<24:51,  3.24s/it]
Lanes were detected
 64%|██████▎   | 801/1261 [47:55<24:54,  3.25s/it]
Lanes were detected
 64%|██████▎   | 802/1261 [47:59<25:07,  3.28s/it]
Lanes were detected
 64%|██████▎   | 803/1261 [48:02<25:29,  3.34s/it]
Lanes were detected
 64%|██████▍   | 804/1261 [48:05<25:36,  3.36s/it]
Lanes were detected
 64%|██████▍   | 805/1261 [48:09<26:28,  3.48s/it]
Lanes were detected
 64%|██████▍   | 806/1261 [48:15<30:51,  4.07s/it]
Lanes were detected
 64%|██████▍   | 807/1261 [48:20<32:37,  4.31s/it]
Lanes were detected
 64%|██████▍   | 808/1261 [48:24<32:10,  4.26s/it]
Lanes were detected
 64%|██████▍   | 809/1261 [48:27<30:19,  4.02s/it]
Lanes were detected
 64%|██████▍   | 810/1261 [48:31<29:03,  3.87s/it]
Lanes were detected
 64%|██████▍   | 811/1261 [48:35<29:02,  3.87s/it]
Lanes were detected
 64%|██████▍   | 812/1261 [48:38<28:05,  3.75s/it]
Lanes were detected
 64%|██████▍   | 813/1261 [48:41<27:24,  3.67s/it]
Lanes were detected
 65%|██████▍   | 814/1261 [48:45<26:48,  3.60s/it]
Lanes were detected
 65%|██████▍   | 815/1261 [48:48<26:02,  3.50s/it]
Lanes were detected
 65%|██████▍   | 816/1261 [48:52<25:56,  3.50s/it]
Lanes were detected
 65%|██████▍   | 817/1261 [48:55<26:12,  3.54s/it]
Lanes were detected
 65%|██████▍   | 818/1261 [48:59<26:58,  3.65s/it]
Lanes were detected
 65%|██████▍   | 819/1261 [49:03<26:33,  3.61s/it]
Lanes were detected
 65%|██████▌   | 820/1261 [49:06<25:57,  3.53s/it]
Lanes were detected
 65%|██████▌   | 821/1261 [49:09<25:33,  3.49s/it]
Lanes were detected
 65%|██████▌   | 822/1261 [49:13<25:50,  3.53s/it]
Lanes were detected
 65%|██████▌   | 823/1261 [49:17<26:00,  3.56s/it]
Lanes were detected
 65%|██████▌   | 824/1261 [49:20<25:47,  3.54s/it]
Lanes were detected
 65%|██████▌   | 825/1261 [49:24<25:40,  3.53s/it]
Lanes were detected
 66%|██████▌   | 826/1261 [49:27<25:45,  3.55s/it]
Lanes were detected
 66%|██████▌   | 827/1261 [49:31<26:29,  3.66s/it]
Lanes were detected
 66%|██████▌   | 828/1261 [49:35<26:08,  3.62s/it]
Lanes were detected
 66%|██████▌   | 829/1261 [49:38<25:28,  3.54s/it]
Lanes were detected
 66%|██████▌   | 830/1261 [49:42<25:07,  3.50s/it]
Lanes were detected
 66%|██████▌   | 831/1261 [49:45<24:50,  3.47s/it]
Lanes were detected
 66%|██████▌   | 832/1261 [49:49<25:09,  3.52s/it]
Lanes were detected
 66%|██████▌   | 833/1261 [49:52<24:51,  3.49s/it]
Lanes were detected
 66%|██████▌   | 834/1261 [49:56<25:01,  3.52s/it]
Lanes were detected
 66%|██████▌   | 835/1261 [49:59<24:43,  3.48s/it]
Lanes were detected
 66%|██████▋   | 836/1261 [50:03<24:56,  3.52s/it]
Lanes were detected
 66%|██████▋   | 837/1261 [50:06<25:04,  3.55s/it]
Lanes were detected
 66%|██████▋   | 838/1261 [50:10<25:10,  3.57s/it]
Lanes were detected
 67%|██████▋   | 839/1261 [50:13<24:35,  3.50s/it]
Lanes were detected
 67%|██████▋   | 840/1261 [50:17<24:44,  3.53s/it]
Lanes were detected
 67%|██████▋   | 841/1261 [50:20<23:58,  3.42s/it]
Lanes were detected
 67%|██████▋   | 842/1261 [50:23<23:19,  3.34s/it]
Lanes were detected
 67%|██████▋   | 843/1261 [50:26<22:45,  3.27s/it]
Lanes were detected
 67%|██████▋   | 844/1261 [50:29<22:22,  3.22s/it]
Lanes were detected
 67%|██████▋   | 845/1261 [50:32<22:06,  3.19s/it]
Lanes were detected
 67%|██████▋   | 846/1261 [50:36<21:51,  3.16s/it]
Lanes were detected
 67%|██████▋   | 847/1261 [50:39<21:38,  3.14s/it]
Lanes were detected
 67%|██████▋   | 848/1261 [50:42<21:32,  3.13s/it]
Lanes were detected
 67%|██████▋   | 849/1261 [50:45<21:23,  3.11s/it]
Lanes were detected
 67%|██████▋   | 850/1261 [50:48<21:15,  3.10s/it]
Lanes were detected
 67%|██████▋   | 851/1261 [50:51<21:10,  3.10s/it]
Lanes were detected
 68%|██████▊   | 852/1261 [50:54<21:07,  3.10s/it]
Lanes were detected
 68%|██████▊   | 853/1261 [50:57<21:05,  3.10s/it]
Lanes were detected
 68%|██████▊   | 854/1261 [51:00<21:15,  3.13s/it]
Lanes were detected
 68%|██████▊   | 855/1261 [51:04<21:46,  3.22s/it]
Lanes were detected
 68%|██████▊   | 856/1261 [51:07<22:07,  3.28s/it]
Lanes were detected
 68%|██████▊   | 857/1261 [51:11<22:22,  3.32s/it]
Lanes were detected
 68%|██████▊   | 858/1261 [51:14<22:14,  3.31s/it]
Lanes were detected
 68%|██████▊   | 859/1261 [51:17<21:52,  3.27s/it]
Lanes were detected
 68%|██████▊   | 860/1261 [51:20<21:32,  3.22s/it]
Lanes were detected
 68%|██████▊   | 861/1261 [51:23<21:13,  3.18s/it]
Lanes were detected
 68%|██████▊   | 862/1261 [51:26<21:00,  3.16s/it]
Lanes were detected
 68%|██████▊   | 863/1261 [51:30<21:36,  3.26s/it]
Lanes were detected
 69%|██████▊   | 864/1261 [51:33<21:49,  3.30s/it]
Lanes were detected
 69%|██████▊   | 865/1261 [51:36<21:36,  3.27s/it]
Lanes were detected
 69%|██████▊   | 866/1261 [51:40<21:15,  3.23s/it]
Lanes were detected
 69%|██████▉   | 867/1261 [51:43<21:30,  3.28s/it]
Lanes were detected
 69%|██████▉   | 868/1261 [51:46<21:39,  3.31s/it]
Lanes were detected
 69%|██████▉   | 869/1261 [51:50<22:13,  3.40s/it]
Lanes were detected
 69%|██████▉   | 870/1261 [51:53<22:14,  3.41s/it]
Lanes were detected
 69%|██████▉   | 871/1261 [51:57<22:26,  3.45s/it]
Lanes were detected
 69%|██████▉   | 872/1261 [52:00<22:24,  3.46s/it]
Lanes were detected
 69%|██████▉   | 873/1261 [52:04<21:55,  3.39s/it]
Lanes were detected
 69%|██████▉   | 874/1261 [52:07<21:23,  3.32s/it]
Lanes were detected
 69%|██████▉   | 875/1261 [52:10<21:05,  3.28s/it]
Lanes were detected
 69%|██████▉   | 876/1261 [52:13<20:47,  3.24s/it]
Lanes were detected
 70%|██████▉   | 877/1261 [52:16<20:28,  3.20s/it]
Lanes were detected
 70%|██████▉   | 878/1261 [52:19<20:14,  3.17s/it]
Lanes were detected
 70%|██████▉   | 879/1261 [52:22<20:06,  3.16s/it]
Lanes were detected
 70%|██████▉   | 880/1261 [52:26<20:00,  3.15s/it]
Lanes were detected
 70%|██████▉   | 881/1261 [52:29<19:53,  3.14s/it]
Lanes were detected
 70%|██████▉   | 882/1261 [52:32<19:51,  3.14s/it]
Lanes were detected
 70%|███████   | 883/1261 [52:35<19:49,  3.15s/it]
Lanes were detected
 70%|███████   | 884/1261 [52:38<19:42,  3.14s/it]
Lanes were detected
 70%|███████   | 885/1261 [52:41<19:36,  3.13s/it]
Lanes were detected
 70%|███████   | 886/1261 [52:44<19:32,  3.13s/it]
Lanes were detected
 70%|███████   | 887/1261 [52:48<20:08,  3.23s/it]
Lanes weren't detected
 70%|███████   | 888/1261 [52:53<23:04,  3.71s/it]
Lanes weren't detected
 70%|███████   | 889/1261 [52:57<23:16,  3.76s/it]
Lanes weren't detected
 71%|███████   | 890/1261 [53:01<23:52,  3.86s/it]
Lanes weren't detected
 71%|███████   | 891/1261 [53:04<22:46,  3.69s/it]
Lanes were detected
 71%|███████   | 892/1261 [53:08<23:32,  3.83s/it]
Lanes weren't detected
 71%|███████   | 893/1261 [53:13<25:49,  4.21s/it]
Lanes were detected
 71%|███████   | 894/1261 [53:18<25:59,  4.25s/it]
Lanes weren't detected
 71%|███████   | 895/1261 [53:23<28:16,  4.64s/it]
Lanes weren't detected
 71%|███████   | 896/1261 [53:27<26:58,  4.43s/it]
Lanes weren't detected
 71%|███████   | 897/1261 [53:35<32:58,  5.44s/it]
Lanes weren't detected
 71%|███████   | 898/1261 [53:40<32:21,  5.35s/it]
Lanes weren't detected
 71%|███████▏  | 899/1261 [53:44<30:44,  5.10s/it]
Lanes weren't detected
 71%|███████▏  | 900/1261 [53:49<28:49,  4.79s/it]
Lanes weren't detected
 71%|███████▏  | 901/1261 [53:53<28:46,  4.79s/it]
Lanes weren't detected
 72%|███████▏  | 902/1261 [53:59<29:20,  4.90s/it]
Lanes weren't detected
 72%|███████▏  | 903/1261 [54:03<28:15,  4.74s/it]
Lanes weren't detected
 72%|███████▏  | 904/1261 [54:07<26:57,  4.53s/it]
Lanes were detected
 72%|███████▏  | 905/1261 [54:11<26:09,  4.41s/it]
Lanes weren't detected
 72%|███████▏  | 906/1261 [54:15<25:28,  4.31s/it]
Lanes weren't detected
 72%|███████▏  | 907/1261 [54:20<26:19,  4.46s/it]
Lanes weren't detected
 72%|███████▏  | 908/1261 [54:25<26:58,  4.58s/it]
Lanes weren't detected
 72%|███████▏  | 909/1261 [54:29<27:00,  4.60s/it]
Lanes weren't detected
 72%|███████▏  | 910/1261 [54:34<26:40,  4.56s/it]
Lanes weren't detected
 72%|███████▏  | 911/1261 [54:38<25:47,  4.42s/it]
Lanes weren't detected
 72%|███████▏  | 912/1261 [54:42<24:59,  4.30s/it]
Lanes weren't detected
 72%|███████▏  | 913/1261 [54:47<26:01,  4.49s/it]
Lanes weren't detected
 72%|███████▏  | 914/1261 [54:52<26:19,  4.55s/it]
Lanes weren't detected
 73%|███████▎  | 915/1261 [54:56<25:39,  4.45s/it]
Lanes weren't detected
 73%|███████▎  | 916/1261 [54:59<23:58,  4.17s/it]
Lanes weren't detected
 73%|███████▎  | 917/1261 [55:03<23:02,  4.02s/it]
Lanes weren't detected
 73%|███████▎  | 918/1261 [55:09<26:24,  4.62s/it]
Lanes weren't detected
 73%|███████▎  | 919/1261 [55:14<27:36,  4.84s/it]
Lanes weren't detected
 73%|███████▎  | 920/1261 [55:20<28:47,  5.07s/it]
Lanes weren't detected
 73%|███████▎  | 921/1261 [55:26<29:26,  5.20s/it]
Lanes weren't detected
 73%|███████▎  | 922/1261 [55:32<31:38,  5.60s/it]
Lanes weren't detected
 73%|███████▎  | 923/1261 [55:38<31:32,  5.60s/it]
Lanes weren't detected
 73%|███████▎  | 924/1261 [55:42<28:52,  5.14s/it]
Lanes weren't detected
 73%|███████▎  | 925/1261 [55:46<27:15,  4.87s/it]
Lanes weren't detected
 73%|███████▎  | 926/1261 [55:50<26:23,  4.73s/it]
Lanes weren't detected
 74%|███████▎  | 927/1261 [55:55<25:56,  4.66s/it]
Lanes weren't detected
 74%|███████▎  | 928/1261 [55:59<25:16,  4.55s/it]
Lanes weren't detected
 74%|███████▎  | 929/1261 [56:03<24:29,  4.43s/it]
Lanes weren't detected
 74%|███████▍  | 930/1261 [56:07<23:54,  4.33s/it]
Lanes weren't detected
 74%|███████▍  | 931/1261 [56:12<23:46,  4.32s/it]
Lanes weren't detected
 74%|███████▍  | 932/1261 [56:16<23:14,  4.24s/it]
Lanes weren't detected
 74%|███████▍  | 933/1261 [56:21<24:42,  4.52s/it]
Lanes weren't detected
 74%|███████▍  | 934/1261 [56:25<24:27,  4.49s/it]
Lanes weren't detected
 74%|███████▍  | 935/1261 [56:29<23:15,  4.28s/it]
Lanes weren't detected
 74%|███████▍  | 936/1261 [56:33<23:03,  4.26s/it]
Lanes weren't detected
 74%|███████▍  | 937/1261 [56:37<22:23,  4.15s/it]
Lanes weren't detected
 74%|███████▍  | 938/1261 [56:43<24:10,  4.49s/it]
Lanes were detected
 74%|███████▍  | 939/1261 [56:47<23:25,  4.36s/it]
Lanes weren't detected
 75%|███████▍  | 940/1261 [56:50<22:37,  4.23s/it]
Lanes weren't detected
 75%|███████▍  | 941/1261 [56:54<22:06,  4.15s/it]
Lanes weren't detected
 75%|███████▍  | 942/1261 [56:58<21:51,  4.11s/it]
Lanes weren't detected
 75%|███████▍  | 943/1261 [57:02<21:32,  4.06s/it]
Lanes weren't detected
 75%|███████▍  | 944/1261 [57:06<21:13,  4.02s/it]
Lanes weren't detected
 75%|███████▍  | 945/1261 [57:10<20:57,  3.98s/it]
Lanes weren't detected
 75%|███████▌  | 946/1261 [57:14<20:52,  3.98s/it]
Lanes weren't detected
 75%|███████▌  | 947/1261 [57:18<20:52,  3.99s/it]
Lanes weren't detected
 75%|███████▌  | 948/1261 [57:22<20:58,  4.02s/it]
Lanes weren't detected
 75%|███████▌  | 949/1261 [57:28<22:52,  4.40s/it]
Lanes weren't detected
 75%|███████▌  | 950/1261 [57:33<24:02,  4.64s/it]
Lanes were detected
 75%|███████▌  | 951/1261 [57:37<23:28,  4.54s/it]
Lanes weren't detected
 75%|███████▌  | 952/1261 [57:41<22:55,  4.45s/it]
Lanes weren't detected
 76%|███████▌  | 953/1261 [57:46<22:43,  4.43s/it]
Lanes weren't detected
 76%|███████▌  | 954/1261 [57:50<22:36,  4.42s/it]
Lanes weren't detected
 76%|███████▌  | 955/1261 [57:54<22:01,  4.32s/it]
Lanes weren't detected
 76%|███████▌  | 956/1261 [57:58<21:38,  4.26s/it]
Lanes weren't detected
 76%|███████▌  | 957/1261 [58:02<21:18,  4.21s/it]
Lanes weren't detected
 76%|███████▌  | 958/1261 [58:06<20:47,  4.12s/it]
Lanes weren't detected
 76%|███████▌  | 959/1261 [58:10<20:42,  4.11s/it]
Lanes weren't detected
 76%|███████▌  | 960/1261 [58:15<20:51,  4.16s/it]
Lanes weren't detected
 76%|███████▌  | 961/1261 [58:20<22:35,  4.52s/it]
Lanes were detected
 76%|███████▋  | 962/1261 [58:25<23:40,  4.75s/it]
Lanes were detected
 76%|███████▋  | 963/1261 [58:30<23:30,  4.73s/it]
Lanes weren't detected
 76%|███████▋  | 964/1261 [58:34<22:23,  4.52s/it]
Lanes weren't detected
 77%|███████▋  | 965/1261 [58:38<21:47,  4.42s/it]
Lanes were detected
 77%|███████▋  | 966/1261 [58:45<25:19,  5.15s/it]
Lanes weren't detected
 77%|███████▋  | 967/1261 [58:51<25:38,  5.23s/it]
Lanes weren't detected
 77%|███████▋  | 968/1261 [58:55<23:52,  4.89s/it]
Lanes weren't detected
 77%|███████▋  | 969/1261 [58:59<23:00,  4.73s/it]
Lanes weren't detected
 77%|███████▋  | 970/1261 [59:04<22:56,  4.73s/it]
Lanes weren't detected
 77%|███████▋  | 971/1261 [59:08<21:50,  4.52s/it]
Lanes weren't detected
 77%|███████▋  | 972/1261 [59:12<21:41,  4.50s/it]
Lanes weren't detected
 77%|███████▋  | 973/1261 [59:16<21:03,  4.39s/it]
Lanes weren't detected
 77%|███████▋  | 974/1261 [59:20<20:06,  4.20s/it]
Lanes weren't detected
 77%|███████▋  | 975/1261 [59:24<19:54,  4.18s/it]
Lanes weren't detected
 77%|███████▋  | 976/1261 [59:30<22:34,  4.75s/it]
Lanes weren't detected
 77%|███████▋  | 977/1261 [59:35<22:11,  4.69s/it]
Lanes weren't detected
 78%|███████▊  | 978/1261 [59:40<22:10,  4.70s/it]
Lanes weren't detected
 78%|███████▊  | 979/1261 [59:45<23:03,  4.91s/it]
Lanes weren't detected
 78%|███████▊  | 980/1261 [59:49<21:47,  4.65s/it]
Lanes weren't detected
 78%|███████▊  | 981/1261 [59:53<20:42,  4.44s/it]
Lanes weren't detected
 78%|███████▊  | 982/1261 [59:57<19:48,  4.26s/it]
Lanes weren't detected
 78%|███████▊  | 983/1261 [1:00:01<19:15,  4.16s/it]
Lanes weren't detected
 78%|███████▊  | 984/1261 [1:00:05<18:57,  4.11s/it]
Lanes weren't detected
 78%|███████▊  | 985/1261 [1:00:09<18:30,  4.02s/it]
Lanes weren't detected
 78%|███████▊  | 986/1261 [1:00:12<18:16,  3.99s/it]
Lanes weren't detected
 78%|███████▊  | 987/1261 [1:00:16<18:15,  4.00s/it]
Lanes weren't detected
 78%|███████▊  | 988/1261 [1:00:21<18:20,  4.03s/it]
Lanes weren't detected
 78%|███████▊  | 989/1261 [1:00:25<18:17,  4.03s/it]
Lanes weren't detected
 79%|███████▊  | 990/1261 [1:00:28<18:00,  3.99s/it]
Lanes weren't detected
 79%|███████▊  | 991/1261 [1:00:32<17:53,  3.98s/it]
Lanes weren't detected
 79%|███████▊  | 992/1261 [1:00:36<17:53,  3.99s/it]
Lanes weren't detected
 79%|███████▊  | 993/1261 [1:00:40<17:47,  3.98s/it]
Lanes weren't detected
 79%|███████▉  | 994/1261 [1:00:44<17:47,  4.00s/it]
Lanes weren't detected
 79%|███████▉  | 995/1261 [1:00:48<17:38,  3.98s/it]
Lanes weren't detected
 79%|███████▉  | 996/1261 [1:00:52<17:43,  4.01s/it]
Lanes weren't detected
 79%|███████▉  | 997/1261 [1:00:56<17:24,  3.96s/it]
Lanes weren't detected
 79%|███████▉  | 998/1261 [1:01:00<17:36,  4.02s/it]
Lanes weren't detected
 79%|███████▉  | 999/1261 [1:01:04<17:22,  3.98s/it]
Lanes weren't detected
 79%|███████▉  | 1000/1261 [1:01:08<17:08,  3.94s/it]
Lanes weren't detected
 79%|███████▉  | 1001/1261 [1:01:12<17:11,  3.97s/it]
Lanes weren't detected
 79%|███████▉  | 1002/1261 [1:01:16<17:04,  3.95s/it]
Lanes weren't detected
 80%|███████▉  | 1003/1261 [1:01:20<16:54,  3.93s/it]
Lanes were detected
 80%|███████▉  | 1004/1261 [1:01:24<17:00,  3.97s/it]
Lanes were detected
 80%|███████▉  | 1005/1261 [1:01:28<17:19,  4.06s/it]
Lanes were detected
 80%|███████▉  | 1006/1261 [1:01:33<17:43,  4.17s/it]
Lanes were detected
 80%|███████▉  | 1007/1261 [1:01:37<17:39,  4.17s/it]
Lanes were detected
 80%|███████▉  | 1008/1261 [1:01:41<17:21,  4.12s/it]
Lanes were detected
 80%|████████  | 1009/1261 [1:01:45<17:04,  4.07s/it]
Lanes were detected
 80%|████████  | 1010/1261 [1:01:49<16:49,  4.02s/it]
Lanes were detected
 80%|████████  | 1011/1261 [1:01:53<16:50,  4.04s/it]
Lanes were detected
 80%|████████  | 1012/1261 [1:01:57<16:35,  4.00s/it]
Lanes were detected
 80%|████████  | 1013/1261 [1:02:01<16:42,  4.04s/it]
Lanes were detected
 80%|████████  | 1014/1261 [1:02:05<16:34,  4.03s/it]
Lanes were detected
 80%|████████  | 1015/1261 [1:02:09<16:06,  3.93s/it]
Lanes were detected
 81%|████████  | 1016/1261 [1:02:13<16:21,  4.01s/it]
Lanes were detected
 81%|████████  | 1017/1261 [1:02:17<16:22,  4.03s/it]
Lanes were detected
 81%|████████  | 1018/1261 [1:02:21<16:17,  4.02s/it]
Lanes were detected
 81%|████████  | 1019/1261 [1:02:25<16:08,  4.00s/it]
Lanes were detected
 81%|████████  | 1020/1261 [1:02:29<15:47,  3.93s/it]
Lanes were detected
 81%|████████  | 1021/1261 [1:02:33<15:49,  3.96s/it]
Lanes were detected
 81%|████████  | 1022/1261 [1:02:37<15:54,  3.99s/it]
Lanes were detected
 81%|████████  | 1023/1261 [1:02:41<15:42,  3.96s/it]
Lanes were detected
 81%|████████  | 1024/1261 [1:02:44<15:32,  3.94s/it]
Lanes were detected
 81%|████████▏ | 1025/1261 [1:02:48<15:26,  3.93s/it]
Lanes were detected
 81%|████████▏ | 1026/1261 [1:02:53<15:58,  4.08s/it]
Lanes were detected
 81%|████████▏ | 1027/1261 [1:02:57<16:16,  4.17s/it]
Lanes were detected
 82%|████████▏ | 1028/1261 [1:03:01<16:06,  4.15s/it]
Lanes were detected
 82%|████████▏ | 1029/1261 [1:03:05<15:42,  4.06s/it]
Lanes were detected
 82%|████████▏ | 1030/1261 [1:03:09<15:36,  4.06s/it]
Lanes were detected
 82%|████████▏ | 1031/1261 [1:03:13<15:27,  4.03s/it]
Lanes were detected
 82%|████████▏ | 1032/1261 [1:03:17<15:33,  4.08s/it]
Lanes were detected
 82%|████████▏ | 1033/1261 [1:03:21<15:24,  4.06s/it]
Lanes were detected
 82%|████████▏ | 1034/1261 [1:03:25<15:10,  4.01s/it]
Lanes were detected
 82%|████████▏ | 1035/1261 [1:03:30<15:33,  4.13s/it]
Lanes were detected
 82%|████████▏ | 1036/1261 [1:03:34<15:49,  4.22s/it]
Lanes were detected
 82%|████████▏ | 1037/1261 [1:03:38<15:34,  4.17s/it]
Lanes were detected
 82%|████████▏ | 1038/1261 [1:03:43<16:05,  4.33s/it]
Lanes were detected
 82%|████████▏ | 1039/1261 [1:03:46<15:09,  4.10s/it]
Lanes were detected
 82%|████████▏ | 1040/1261 [1:03:50<14:44,  4.00s/it]
Lanes were detected
 83%|████████▎ | 1041/1261 [1:03:54<14:41,  4.01s/it]
Lanes were detected
 83%|████████▎ | 1042/1261 [1:03:58<14:30,  3.98s/it]
Lanes were detected
 83%|████████▎ | 1043/1261 [1:04:04<15:58,  4.40s/it]
Lanes were detected
 83%|████████▎ | 1044/1261 [1:04:08<15:58,  4.42s/it]
Lanes were detected
 83%|████████▎ | 1045/1261 [1:04:13<17:00,  4.72s/it]
Lanes were detected
 83%|████████▎ | 1046/1261 [1:04:17<15:54,  4.44s/it]
Lanes were detected
 83%|████████▎ | 1047/1261 [1:04:21<14:57,  4.19s/it]
Lanes were detected
 83%|████████▎ | 1048/1261 [1:04:25<14:39,  4.13s/it]
Lanes were detected
 83%|████████▎ | 1049/1261 [1:04:28<14:05,  3.99s/it]
Lanes were detected
 83%|████████▎ | 1050/1261 [1:04:32<13:38,  3.88s/it]
Lanes were detected
 83%|████████▎ | 1051/1261 [1:04:36<13:20,  3.81s/it]
Lanes were detected
 83%|████████▎ | 1052/1261 [1:04:39<13:02,  3.75s/it]
Lanes were detected
 84%|████████▎ | 1053/1261 [1:04:43<12:51,  3.71s/it]
Lanes were detected
 84%|████████▎ | 1054/1261 [1:04:47<12:43,  3.69s/it]
Lanes were detected
 84%|████████▎ | 1055/1261 [1:04:50<12:29,  3.64s/it]
Lanes were detected
 84%|████████▎ | 1056/1261 [1:04:54<12:35,  3.69s/it]
Lanes were detected
 84%|████████▍ | 1057/1261 [1:04:58<12:27,  3.67s/it]
Lanes were detected
 84%|████████▍ | 1058/1261 [1:05:01<12:19,  3.64s/it]
Lanes were detected
 84%|████████▍ | 1059/1261 [1:05:05<12:28,  3.70s/it]
Lanes were detected
 84%|████████▍ | 1060/1261 [1:05:09<12:17,  3.67s/it]
Lanes were detected
 84%|████████▍ | 1061/1261 [1:05:12<12:07,  3.64s/it]
Lanes were detected
 84%|████████▍ | 1062/1261 [1:05:16<12:13,  3.68s/it]
Lanes were detected
 84%|████████▍ | 1063/1261 [1:05:19<12:02,  3.65s/it]
Lanes were detected
 84%|████████▍ | 1064/1261 [1:05:23<11:58,  3.65s/it]
Lanes were detected
 84%|████████▍ | 1065/1261 [1:05:27<11:58,  3.67s/it]
Lanes were detected
 85%|████████▍ | 1066/1261 [1:05:31<12:18,  3.79s/it]
Lanes were detected
 85%|████████▍ | 1067/1261 [1:05:35<12:25,  3.84s/it]
Lanes were detected
 85%|████████▍ | 1068/1261 [1:05:39<12:12,  3.79s/it]
Lanes were detected
 85%|████████▍ | 1069/1261 [1:05:42<11:58,  3.74s/it]
Lanes were detected
 85%|████████▍ | 1070/1261 [1:05:46<11:51,  3.72s/it]
Lanes were detected
 85%|████████▍ | 1071/1261 [1:05:49<11:41,  3.69s/it]
Lanes were detected
 85%|████████▌ | 1072/1261 [1:05:53<11:34,  3.67s/it]
Lanes were detected
 85%|████████▌ | 1073/1261 [1:05:57<11:46,  3.76s/it]
Lanes were detected
 85%|████████▌ | 1074/1261 [1:06:02<12:29,  4.01s/it]
Lanes were detected
 85%|████████▌ | 1075/1261 [1:06:06<12:24,  4.00s/it]
Lanes were detected
 85%|████████▌ | 1076/1261 [1:06:09<12:07,  3.93s/it]
Lanes were detected
 85%|████████▌ | 1077/1261 [1:06:16<14:17,  4.66s/it]
Lanes were detected
 85%|████████▌ | 1078/1261 [1:06:22<15:13,  4.99s/it]
Lanes were detected
 86%|████████▌ | 1079/1261 [1:06:27<15:27,  5.10s/it]
Lanes were detected
 86%|████████▌ | 1080/1261 [1:06:31<14:12,  4.71s/it]
Lanes were detected
 86%|████████▌ | 1081/1261 [1:06:34<13:09,  4.39s/it]
Lanes were detected
 86%|████████▌ | 1082/1261 [1:06:38<12:27,  4.18s/it]
Lanes were detected
 86%|████████▌ | 1083/1261 [1:06:42<11:56,  4.03s/it]
Lanes were detected
 86%|████████▌ | 1084/1261 [1:06:45<11:33,  3.92s/it]
Lanes were detected
 86%|████████▌ | 1085/1261 [1:06:49<11:11,  3.82s/it]
Lanes were detected
 86%|████████▌ | 1086/1261 [1:06:53<11:02,  3.78s/it]
Lanes were detected
 86%|████████▌ | 1087/1261 [1:06:56<10:44,  3.70s/it]
Lanes were detected
 86%|████████▋ | 1088/1261 [1:07:00<11:02,  3.83s/it]
Lanes were detected
 86%|████████▋ | 1089/1261 [1:07:05<11:40,  4.08s/it]
Lanes were detected
 86%|████████▋ | 1090/1261 [1:07:11<13:43,  4.82s/it]
Lanes were detected
 87%|████████▋ | 1091/1261 [1:07:17<14:34,  5.15s/it]
Lanes were detected
 87%|████████▋ | 1092/1261 [1:07:21<13:20,  4.74s/it]
Lanes were detected
 87%|████████▋ | 1093/1261 [1:07:25<12:23,  4.42s/it]
Lanes were detected
 87%|████████▋ | 1094/1261 [1:07:29<12:03,  4.34s/it]
Lanes were detected
 87%|████████▋ | 1095/1261 [1:07:33<11:48,  4.27s/it]
Lanes were detected
 87%|████████▋ | 1096/1261 [1:07:37<11:20,  4.12s/it]
Lanes were detected
 87%|████████▋ | 1097/1261 [1:07:41<10:51,  3.98s/it]
Lanes were detected
 87%|████████▋ | 1098/1261 [1:07:44<10:30,  3.87s/it]
Lanes were detected
 87%|████████▋ | 1099/1261 [1:07:48<10:10,  3.77s/it]
Lanes were detected
 87%|████████▋ | 1100/1261 [1:07:51<10:05,  3.76s/it]
Lanes were detected
 87%|████████▋ | 1101/1261 [1:07:55<10:12,  3.83s/it]
Lanes were detected
 87%|████████▋ | 1102/1261 [1:07:59<09:57,  3.76s/it]
Lanes were detected
 87%|████████▋ | 1103/1261 [1:08:03<09:46,  3.71s/it]
Lanes were detected
 88%|████████▊ | 1104/1261 [1:08:06<09:35,  3.66s/it]
Lanes were detected
 88%|████████▊ | 1105/1261 [1:08:10<09:34,  3.68s/it]
Lanes were detected
 88%|████████▊ | 1106/1261 [1:08:14<09:28,  3.67s/it]
Lanes were detected
 88%|████████▊ | 1107/1261 [1:08:17<09:19,  3.63s/it]
Lanes were detected
 88%|████████▊ | 1108/1261 [1:08:21<09:19,  3.65s/it]
Lanes were detected
 88%|████████▊ | 1109/1261 [1:08:24<09:11,  3.63s/it]
Lanes were detected
 88%|████████▊ | 1110/1261 [1:08:28<09:09,  3.64s/it]
Lanes were detected
 88%|████████▊ | 1111/1261 [1:08:32<09:04,  3.63s/it]
Lanes were detected
 88%|████████▊ | 1112/1261 [1:08:35<08:55,  3.59s/it]
Lanes were detected
 88%|████████▊ | 1113/1261 [1:08:39<08:54,  3.61s/it]
Lanes were detected
 88%|████████▊ | 1114/1261 [1:08:42<08:45,  3.57s/it]
Lanes were detected
 88%|████████▊ | 1115/1261 [1:08:46<08:47,  3.62s/it]
Lanes were detected
 89%|████████▊ | 1116/1261 [1:08:49<08:38,  3.57s/it]
Lanes were detected
 89%|████████▊ | 1117/1261 [1:08:53<08:29,  3.54s/it]
Lanes were detected
 89%|████████▊ | 1118/1261 [1:08:57<08:53,  3.73s/it]
Lanes were detected
 89%|████████▊ | 1119/1261 [1:09:01<08:46,  3.71s/it]
Lanes were detected
 89%|████████▉ | 1120/1261 [1:09:04<08:40,  3.69s/it]
Lanes were detected
 89%|████████▉ | 1121/1261 [1:09:08<08:36,  3.69s/it]
Lanes were detected
 89%|████████▉ | 1122/1261 [1:09:12<08:25,  3.64s/it]
Lanes were detected
 89%|████████▉ | 1123/1261 [1:09:15<08:20,  3.62s/it]
Lanes were detected
 89%|████████▉ | 1124/1261 [1:09:19<08:21,  3.66s/it]
Lanes were detected
 89%|████████▉ | 1125/1261 [1:09:22<08:12,  3.62s/it]
Lanes were detected
 89%|████████▉ | 1126/1261 [1:09:26<08:03,  3.58s/it]
Lanes were detected
 89%|████████▉ | 1127/1261 [1:09:30<08:19,  3.73s/it]
Lanes were detected
 89%|████████▉ | 1128/1261 [1:09:34<08:19,  3.75s/it]
Lanes were detected
 90%|████████▉ | 1129/1261 [1:09:37<08:10,  3.72s/it]
Lanes were detected
 90%|████████▉ | 1130/1261 [1:09:41<08:03,  3.69s/it]
Lanes were detected
 90%|████████▉ | 1131/1261 [1:09:45<07:51,  3.63s/it]
Lanes were detected
 90%|████████▉ | 1132/1261 [1:09:48<07:48,  3.63s/it]
Lanes were detected
 90%|████████▉ | 1133/1261 [1:09:52<07:39,  3.59s/it]
Lanes were detected
 90%|████████▉ | 1134/1261 [1:09:55<07:30,  3.55s/it]
Lanes were detected
 90%|█████████ | 1135/1261 [1:09:59<07:29,  3.57s/it]
Lanes were detected
 90%|█████████ | 1136/1261 [1:10:02<07:28,  3.58s/it]
Lanes were detected
 90%|█████████ | 1137/1261 [1:10:06<07:22,  3.57s/it]
Lanes were detected
 90%|█████████ | 1138/1261 [1:10:10<07:19,  3.57s/it]
Lanes were detected
 90%|█████████ | 1139/1261 [1:10:13<07:16,  3.58s/it]
Lanes were detected
 90%|█████████ | 1140/1261 [1:10:17<07:11,  3.56s/it]
Lanes were detected
 90%|█████████ | 1141/1261 [1:10:20<07:10,  3.59s/it]
Lanes were detected
 91%|█████████ | 1142/1261 [1:10:24<07:03,  3.56s/it]
Lanes were detected
 91%|█████████ | 1143/1261 [1:10:27<06:58,  3.54s/it]
Lanes were detected
 91%|█████████ | 1144/1261 [1:10:31<06:55,  3.55s/it]
Lanes were detected
 91%|█████████ | 1145/1261 [1:10:34<06:51,  3.55s/it]
Lanes were detected
 91%|█████████ | 1146/1261 [1:10:38<06:47,  3.55s/it]
Lanes were detected
 91%|█████████ | 1147/1261 [1:10:41<06:45,  3.56s/it]
Lanes were detected
 91%|█████████ | 1148/1261 [1:10:45<06:42,  3.56s/it]
Lanes were detected
 91%|█████████ | 1149/1261 [1:10:49<06:38,  3.55s/it]
Lanes were detected
 91%|█████████ | 1150/1261 [1:10:52<06:33,  3.54s/it]
Lanes were detected
 91%|█████████▏| 1151/1261 [1:10:56<06:28,  3.54s/it]
Lanes were detected
 91%|█████████▏| 1152/1261 [1:10:59<06:31,  3.60s/it]
Lanes were detected
 91%|█████████▏| 1153/1261 [1:11:03<06:25,  3.57s/it]
Lanes were detected
 92%|█████████▏| 1154/1261 [1:11:07<06:31,  3.65s/it]
Lanes were detected
 92%|█████████▏| 1155/1261 [1:11:11<06:37,  3.75s/it]
Lanes were detected
 92%|█████████▏| 1156/1261 [1:11:14<06:31,  3.73s/it]
Lanes were detected
 92%|█████████▏| 1157/1261 [1:11:18<06:21,  3.67s/it]
Lanes were detected
 92%|█████████▏| 1158/1261 [1:11:21<06:13,  3.63s/it]
Lanes were detected
 92%|█████████▏| 1159/1261 [1:11:25<06:10,  3.63s/it]
Lanes were detected
 92%|█████████▏| 1160/1261 [1:11:29<06:13,  3.70s/it]
Lanes were detected
 92%|█████████▏| 1161/1261 [1:11:33<06:13,  3.74s/it]
Lanes were detected
 92%|█████████▏| 1162/1261 [1:11:37<06:11,  3.75s/it]
Lanes were detected
 92%|█████████▏| 1163/1261 [1:11:40<06:00,  3.68s/it]
Lanes were detected
 92%|█████████▏| 1164/1261 [1:11:44<05:51,  3.63s/it]
Lanes were detected
 92%|█████████▏| 1165/1261 [1:11:47<05:51,  3.66s/it]
Lanes were detected
 92%|█████████▏| 1166/1261 [1:11:51<05:44,  3.63s/it]
Lanes were detected
 93%|█████████▎| 1167/1261 [1:11:54<05:36,  3.58s/it]
Lanes were detected
 93%|█████████▎| 1168/1261 [1:11:58<05:38,  3.63s/it]
Lanes were detected
 93%|█████████▎| 1169/1261 [1:12:02<05:31,  3.61s/it]
Lanes were detected
 93%|█████████▎| 1170/1261 [1:12:05<05:26,  3.58s/it]
Lanes were detected
 93%|█████████▎| 1171/1261 [1:12:09<05:28,  3.65s/it]
Lanes were detected
 93%|█████████▎| 1172/1261 [1:12:13<05:21,  3.61s/it]
Lanes were detected
 93%|█████████▎| 1173/1261 [1:12:16<05:16,  3.60s/it]
Lanes were detected
 93%|█████████▎| 1174/1261 [1:12:20<05:15,  3.63s/it]
Lanes were detected
 93%|█████████▎| 1175/1261 [1:12:23<05:09,  3.60s/it]
Lanes were detected
 93%|█████████▎| 1176/1261 [1:12:27<05:02,  3.56s/it]
Lanes were detected
 93%|█████████▎| 1177/1261 [1:12:31<05:03,  3.62s/it]
Lanes were detected
 93%|█████████▎| 1178/1261 [1:12:34<04:57,  3.58s/it]
Lanes were detected
 93%|█████████▎| 1179/1261 [1:12:38<04:51,  3.56s/it]
Lanes were detected
 94%|█████████▎| 1180/1261 [1:12:41<04:50,  3.59s/it]
Lanes were detected
 94%|█████████▎| 1181/1261 [1:12:45<04:43,  3.55s/it]
Lanes were detected
 94%|█████████▎| 1182/1261 [1:12:48<04:38,  3.53s/it]
Lanes were detected
 94%|█████████▍| 1183/1261 [1:12:52<04:42,  3.62s/it]
Lanes were detected
 94%|█████████▍| 1184/1261 [1:12:55<04:36,  3.59s/it]
Lanes were detected
 94%|█████████▍| 1185/1261 [1:12:59<04:30,  3.56s/it]
Lanes were detected
 94%|█████████▍| 1186/1261 [1:13:03<04:31,  3.61s/it]
Lanes were detected
 94%|█████████▍| 1187/1261 [1:13:06<04:26,  3.60s/it]
Lanes were detected
 94%|█████████▍| 1188/1261 [1:13:10<04:20,  3.56s/it]
Lanes were detected
 94%|█████████▍| 1189/1261 [1:13:13<04:15,  3.54s/it]
Lanes were detected
 94%|█████████▍| 1190/1261 [1:13:17<04:14,  3.58s/it]
Lanes were detected
 94%|█████████▍| 1191/1261 [1:13:21<04:11,  3.59s/it]
Lanes were detected
 95%|█████████▍| 1192/1261 [1:13:24<04:07,  3.58s/it]
Lanes were detected
 95%|█████████▍| 1193/1261 [1:13:28<04:09,  3.67s/it]
Lanes were detected
 95%|█████████▍| 1194/1261 [1:13:32<04:12,  3.76s/it]
Lanes were detected
 95%|█████████▍| 1195/1261 [1:13:36<04:06,  3.73s/it]
Lanes were detected
 95%|█████████▍| 1196/1261 [1:13:39<03:59,  3.68s/it]
Lanes were detected
 95%|█████████▍| 1197/1261 [1:13:43<04:01,  3.77s/it]
Lanes were detected
 95%|█████████▌| 1198/1261 [1:13:47<03:57,  3.77s/it]
Lanes were detected
 95%|█████████▌| 1199/1261 [1:13:50<03:48,  3.69s/it]
Lanes were detected
 95%|█████████▌| 1200/1261 [1:13:54<03:44,  3.69s/it]
Lanes were detected
 95%|█████████▌| 1201/1261 [1:13:58<03:45,  3.75s/it]
Lanes were detected
 95%|█████████▌| 1202/1261 [1:14:02<03:37,  3.69s/it]
Lanes were detected
 95%|█████████▌| 1203/1261 [1:14:05<03:32,  3.67s/it]
Lanes were detected
 95%|█████████▌| 1204/1261 [1:14:09<03:25,  3.61s/it]
Lanes were detected
 96%|█████████▌| 1205/1261 [1:14:12<03:21,  3.61s/it]
Lanes were detected
 96%|█████████▌| 1206/1261 [1:14:16<03:18,  3.62s/it]
Lanes were detected
 96%|█████████▌| 1207/1261 [1:14:19<03:15,  3.61s/it]
Lanes were detected
 96%|█████████▌| 1208/1261 [1:14:23<03:12,  3.63s/it]
Lanes were detected
 96%|█████████▌| 1209/1261 [1:14:27<03:07,  3.60s/it]
Lanes were detected
 96%|█████████▌| 1210/1261 [1:14:30<03:02,  3.57s/it]
Lanes were detected
 96%|█████████▌| 1211/1261 [1:14:34<02:57,  3.56s/it]
Lanes were detected
 96%|█████████▌| 1212/1261 [1:14:37<02:55,  3.59s/it]
Lanes were detected
 96%|█████████▌| 1213/1261 [1:14:41<02:50,  3.55s/it]
Lanes were detected
 96%|█████████▋| 1214/1261 [1:14:44<02:46,  3.54s/it]
Lanes were detected
 96%|█████████▋| 1215/1261 [1:14:48<02:42,  3.54s/it]
Lanes were detected
 96%|█████████▋| 1216/1261 [1:14:52<02:40,  3.56s/it]
Lanes were detected
 97%|█████████▋| 1217/1261 [1:14:55<02:36,  3.57s/it]
Lanes were detected
 97%|█████████▋| 1218/1261 [1:14:59<02:32,  3.56s/it]
Lanes were detected
 97%|█████████▋| 1219/1261 [1:15:02<02:29,  3.55s/it]
Lanes were detected
 97%|█████████▋| 1220/1261 [1:15:06<02:24,  3.54s/it]
Lanes were detected
 97%|█████████▋| 1221/1261 [1:15:09<02:23,  3.58s/it]
Lanes were detected
 97%|█████████▋| 1222/1261 [1:15:13<02:18,  3.55s/it]
Lanes were detected
 97%|█████████▋| 1223/1261 [1:15:16<02:14,  3.54s/it]
Lanes were detected
 97%|█████████▋| 1224/1261 [1:15:20<02:10,  3.53s/it]
Lanes were detected
 97%|█████████▋| 1225/1261 [1:15:23<02:07,  3.53s/it]
Lanes were detected
 97%|█████████▋| 1226/1261 [1:15:27<02:05,  3.58s/it]
Lanes were detected
 97%|█████████▋| 1227/1261 [1:15:31<02:04,  3.67s/it]
Lanes were detected
 97%|█████████▋| 1228/1261 [1:15:35<02:01,  3.67s/it]
Lanes were detected
 97%|█████████▋| 1229/1261 [1:15:38<01:56,  3.63s/it]
Lanes were detected
 98%|█████████▊| 1230/1261 [1:15:42<01:51,  3.59s/it]
Lanes were detected
 98%|█████████▊| 1231/1261 [1:15:45<01:46,  3.56s/it]
Lanes were detected
 98%|█████████▊| 1232/1261 [1:15:49<01:43,  3.58s/it]
Lanes were detected
 98%|█████████▊| 1233/1261 [1:15:53<01:42,  3.67s/it]
Lanes were detected
 98%|█████████▊| 1234/1261 [1:15:56<01:36,  3.57s/it]
Lanes were detected
 98%|█████████▊| 1235/1261 [1:15:59<01:30,  3.49s/it]
Lanes were detected
 98%|█████████▊| 1236/1261 [1:16:03<01:25,  3.44s/it]
Lanes were detected
 98%|█████████▊| 1237/1261 [1:16:06<01:21,  3.40s/it]
Lanes were detected
 98%|█████████▊| 1238/1261 [1:16:09<01:17,  3.38s/it]
Lanes were detected
 98%|█████████▊| 1239/1261 [1:16:13<01:14,  3.40s/it]
Lanes were detected
 98%|█████████▊| 1240/1261 [1:16:16<01:11,  3.41s/it]
Lanes were detected
 98%|█████████▊| 1241/1261 [1:16:20<01:08,  3.41s/it]
Lanes were detected
 98%|█████████▊| 1242/1261 [1:16:23<01:05,  3.42s/it]
Lanes were detected
 99%|█████████▊| 1243/1261 [1:16:26<01:01,  3.42s/it]
Lanes were detected
 99%|█████████▊| 1244/1261 [1:16:30<00:57,  3.39s/it]
Lanes were detected
 99%|█████████▊| 1245/1261 [1:16:33<00:53,  3.37s/it]
Lanes were detected
 99%|█████████▉| 1246/1261 [1:16:36<00:50,  3.36s/it]
Lanes were detected
 99%|█████████▉| 1247/1261 [1:16:40<00:47,  3.37s/it]
Lanes were detected
 99%|█████████▉| 1248/1261 [1:16:43<00:43,  3.35s/it]
Lanes were detected
 99%|█████████▉| 1249/1261 [1:16:46<00:40,  3.34s/it]
Lanes were detected
 99%|█████████▉| 1250/1261 [1:16:50<00:36,  3.33s/it]
Lanes were detected
 99%|█████████▉| 1251/1261 [1:16:53<00:33,  3.33s/it]
Lanes were detected
 99%|█████████▉| 1252/1261 [1:16:56<00:30,  3.34s/it]
Lanes were detected
 99%|█████████▉| 1253/1261 [1:17:00<00:26,  3.33s/it]
Lanes were detected
 99%|█████████▉| 1254/1261 [1:17:03<00:23,  3.34s/it]
Lanes were detected
100%|█████████▉| 1255/1261 [1:17:06<00:19,  3.33s/it]
Lanes were detected
100%|█████████▉| 1256/1261 [1:17:10<00:16,  3.33s/it]
Lanes were detected
100%|█████████▉| 1257/1261 [1:17:13<00:13,  3.32s/it]
Lanes were detected
100%|█████████▉| 1258/1261 [1:17:16<00:09,  3.32s/it]
Lanes were detected
100%|█████████▉| 1259/1261 [1:17:20<00:06,  3.31s/it]
Lanes were detected
100%|█████████▉| 1260/1261 [1:17:23<00:03,  3.31s/it]
Lanes were detected

[MoviePy] Done.
[MoviePy] >>>> Video ready: project_video_result_combined.mp4 

CPU times: user 1h 9min 36s, sys: 6min 20s, total: 1h 15min 56s
Wall time: 1h 17min 24s
In [ ]: